/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import static android.hardware.camera2.cts.CameraTestUtils.*;

import android.graphics.ImageFormat;
import android.view.Surface;

import com.android.ex.camera2.blocking.BlockingSessionCallback;

import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.util.Size;
import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.SessionConfiguration;
import android.util.Log;
import android.util.Pair;
import android.util.Range;
import android.view.SurfaceView;
import android.view.SurfaceHolder;

import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatcher;

import static org.mockito.Mockito.*;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.HashMap;

import org.junit.runners.Parameterized;
import org.junit.runner.RunWith;
import org.junit.Test;

/**
 * CameraDevice preview test using a SurfaceView.
 */
@RunWith(Parameterized.class)
public class SurfaceViewPreviewTest extends Camera2SurfaceViewTestCase {
    private static final String TAG = "SurfaceViewPreviewTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final int FRAME_TIMEOUT_MS = 1000;
    private static final int NUM_FRAMES_VERIFIED = 30;
    private static final int NUM_TEST_PATTERN_FRAMES_VERIFIED = 60;
    private static final float FRAME_DURATION_ERROR_MARGIN = 0.01f; // 1 percent error margin.
    private static final int PREPARE_TIMEOUT_MS = 10000; // 10 s

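    // Note: shared fixtures such as mCamera, mSession, mHandler, mPreviewSurface, mReaderSurface,
    // mStaticInfo, mAllStaticInfo, and mCollector used throughout this class are inherited from
    // the Camera2SurfaceViewTestCase hierarchy and are set up/torn down by the base class.
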
    @Override
    public void setUp() throws Exception {
        super.setUp();
    }

    @Override
    public void tearDown() throws Exception {
        super.tearDown();
    }

    /**
     * Test all supported preview sizes for each camera device.
     * <p>
     * For the first {@link #NUM_FRAMES_VERIFIED} capture results, the availability of the
     * {@link CaptureCallback} callbacks and the monotonically increasing ordering of the capture
     * timestamps are verified.
     * </p>
     */
    @Test
    public void testCameraPreview() throws Exception {
        for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
            try {
                Log.i(TAG, "Testing preview for Camera " + mCameraIdsUnderTest[i]);
                if (!mAllStaticInfo.get(mCameraIdsUnderTest[i]).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIdsUnderTest[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                openDevice(mCameraIdsUnderTest[i]);
                previewTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Basic test pattern mode preview.
     * <p>
     * Only the test pattern preview and capture results are checked; the image buffers are not
     * validated.
     * </p>
     */
    @Test
    public void testBasicTestPatternPreview() throws Exception {
        for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
            try {
                Log.i(TAG, "Testing preview for Camera " + mCameraIdsUnderTest[i]);
                if (!mAllStaticInfo.get(mCameraIdsUnderTest[i]).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIdsUnderTest[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                openDevice(mCameraIdsUnderTest[i]);
                previewTestPatternTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE} for preview, validating the preview
     * frame duration and exposure time.
     */
    @Test
    public void testPreviewFpsRange() throws Exception {
        for (String id : mCameraIdsUnderTest) {
            try {
                if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }
                openDevice(id);
                previewFpsRangeTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test surface set streaming use cases.
     *
     * <p>
     * The test creates output configurations with increasing surface group IDs for the preview
     * and YUV streams. The maximum supported preview size is selected for the preview stream, and
     * the maximum supported YUV size (depending on the hardware level) is selected for the YUV
     * stream. This test also exercises the prepare API.
     * </p>
     */
    @Test
    public void testSurfaceSet() throws Exception {
        for (String id : mCameraIdsUnderTest) {
            try {
                if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }
                openDevice(id);
                surfaceSetTestByCamera(id);
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test to verify the {@link CameraCaptureSession#prepare} method works correctly, and has the
     * expected effects on performance.
     *
     * - Ensure that prepare() results in onSurfacePrepared() being invoked
     * - Ensure that prepare() does not cause preview glitches while operating
     * - Ensure that starting to use a newly-prepared output does not cause additional
     *   preview glitches to occur
     */
    @Test
    public void testPreparePerformance() throws Throwable {
        for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
            try {
                if (!mAllStaticInfo.get(mCameraIdsUnderTest[i]).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIdsUnderTest[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                openDevice(mCameraIdsUnderTest[i]);
                preparePerformanceTestByCamera(mCameraIdsUnderTest[i]);
            } finally {
                closeDevice();
            }
        }
    }

    private void preparePerformanceTestByCamera(String cameraId) throws Exception {
        final int MAX_IMAGES_TO_PREPARE = 10;
        final int UNKNOWN_LATENCY_RESULT_WAIT = 5;
        final int MAX_RESULTS_TO_WAIT = 10;
        final int FRAMES_FOR_AVERAGING = 100;
        final float PREPARE_FRAME_RATE_BOUNDS = 0.05f; // fraction allowed difference
        final float PREPARE_PEAK_RATE_BOUNDS = 0.5f; // fraction allowed difference

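        // The size lists returned by the CTS helpers appear to be ordered largest-first, so
        // get(0) selects the maximum supported size (this matches how max sizes are picked
        // elsewhere in this file).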
        Size maxYuvSize = getSupportedPreviewSizes(cameraId, mCameraManager, null).get(0);
        Size maxPreviewSize = mOrderedPreviewSizes.get(0);

        // Don't need image data, just drop it right away to minimize overhead
        ImageDropperListener imageListener = new ImageDropperListener();

        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

        CaptureRequest.Builder previewRequest =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

        // Configure outputs and session

        updatePreviewSurface(maxPreviewSize);

        createImageReader(maxYuvSize, ImageFormat.YUV_420_888, MAX_IMAGES_TO_PREPARE, imageListener);
        HashMap<Size, Long> yuvMinFrameDurations =
                mStaticInfo.getAvailableMinFrameDurationsForFormatChecked(ImageFormat.YUV_420_888);
        Long readerMinFrameDuration = yuvMinFrameDurations.get(maxYuvSize);

        List<Surface> outputSurfaces = new ArrayList<Surface>();
        outputSurfaces.add(mPreviewSurface);
        outputSurfaces.add(mReaderSurface);

        CameraCaptureSession.StateCallback mockSessionListener =
                mock(CameraCaptureSession.StateCallback.class);

        mSession = configureCameraSession(mCamera, outputSurfaces, mockSessionListener, mHandler);

        previewRequest.addTarget(mPreviewSurface);
        Range<Integer> maxFpsTarget = mStaticInfo.getAeMaxTargetFpsRange();
        previewRequest.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, maxFpsTarget);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        // Converge AE
        waitForAeStable(resultListener, UNKNOWN_LATENCY_RESULT_WAIT);

        if (mStaticInfo.isAeLockSupported()) {
            // Lock AE if possible to improve stability
            previewRequest.set(CaptureRequest.CONTROL_AE_LOCK, true);
            mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
            if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
                // Legacy mode doesn't output AE state
                waitForResultValue(resultListener, CaptureResult.CONTROL_AE_STATE,
                        CaptureResult.CONTROL_AE_STATE_LOCKED, MAX_RESULTS_TO_WAIT);
            }
        }

        // Measure frame rate for a bit
        Pair<Long, Long> frameDurationStats =
                measureMeanFrameInterval(resultListener, FRAMES_FOR_AVERAGING, /*prevTimestamp*/ 0);

        Log.i(TAG, String.format("Frame interval avg during normal preview: %f ms, peak %f ms",
                        frameDurationStats.first / 1e6, frameDurationStats.second / 1e6));

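        // prepare() asks the camera device to pre-allocate buffers for mReaderSurface before the
        // stream is used in any request; onSurfacePrepared() signals that allocation is complete.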
        // Drain results, do prepare
        resultListener.drain();

        mSession.prepare(mReaderSurface);

        verify(mockSessionListener,
                timeout(PREPARE_TIMEOUT_MS).times(1)).
                onSurfacePrepared(eq(mSession), eq(mReaderSurface));

        resultListener.drain();

        // Get at least one more preview result without prepared target
        CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
        long prevTimestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);

        // Now use the prepared stream and ensure there are no hiccups from using it
        previewRequest.addTarget(mReaderSurface);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        Pair<Long, Long> preparedFrameDurationStats =
                measureMeanFrameInterval(resultListener, MAX_IMAGES_TO_PREPARE*2, prevTimestamp);

        Log.i(TAG, String.format("Frame interval with prepared stream added avg: %f ms, peak %f ms",
                        preparedFrameDurationStats.first / 1e6,
                        preparedFrameDurationStats.second / 1e6));

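        // The checks below allow the prepared stream to add at most PREPARE_FRAME_RATE_BOUNDS (5%)
        // to the average frame interval and PREPARE_PEAK_RATE_BOUNDS (50%) to the peak interval,
        // relative to the larger of the baseline interval and the YUV reader's minimum frame
        // duration.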
        if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
            mCollector.expectTrue(
                String.format("Camera %s: Preview peak frame interval affected by use of new " +
                        "stream: preview peak frame interval: %f ms, peak with new stream: %f ms",
                        cameraId,
                        frameDurationStats.second / 1e6, preparedFrameDurationStats.second / 1e6),
                (preparedFrameDurationStats.second <=
                        Math.max(frameDurationStats.second, readerMinFrameDuration) *
                        (1 + PREPARE_PEAK_RATE_BOUNDS)));
            mCollector.expectTrue(
                String.format("Camera %s: Preview average frame interval affected by use of new " +
                        "stream: preview avg frame interval: %f ms, with new stream: %f ms",
                        cameraId,
                        frameDurationStats.first / 1e6, preparedFrameDurationStats.first / 1e6),
                (preparedFrameDurationStats.first <=
                        Math.max(frameDurationStats.first, readerMinFrameDuration) *
                        (1 + PREPARE_FRAME_RATE_BOUNDS)));
        }
    }

    /**
     * Test to verify correct behavior with the same Surface object being used repeatedly with
     * different native internals, and with multiple Surfaces pointing to the same actual consumer
     * object.
     */
    @Test
    public void testSurfaceEquality() throws Exception {
        for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
            try {
                if (!mAllStaticInfo.get(mCameraIdsUnderTest[i]).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIdsUnderTest[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                openDevice(mCameraIdsUnderTest[i]);
                surfaceEqualityTestByCamera(mCameraIdsUnderTest[i]);
            } finally {
                closeDevice();
            }
        }
    }

    private void surfaceEqualityTestByCamera(String cameraId) throws Exception {
        final int SOME_FRAMES = 10;

        Size maxPreviewSize = mOrderedPreviewSizes.get(0);

        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

        // Create a SurfaceTexture for a second output
        SurfaceTexture sharedOutputTexture = new SurfaceTexture(/*random texture ID*/ 5);
        sharedOutputTexture.setDefaultBufferSize(maxPreviewSize.getWidth(),
                maxPreviewSize.getHeight());
        Surface sharedOutputSurface1 = new Surface(sharedOutputTexture);

        updatePreviewSurface(maxPreviewSize);

        List<Surface> outputSurfaces = new ArrayList<Surface>();
        outputSurfaces.add(mPreviewSurface);
        outputSurfaces.add(sharedOutputSurface1);

        BlockingSessionCallback sessionListener =
                new BlockingSessionCallback();

        mSession = configureCameraSession(mCamera, outputSurfaces, sessionListener, mHandler);
        sessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_READY,
                SESSION_CONFIGURE_TIMEOUT_MS);

        CaptureRequest.Builder previewRequest =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        previewRequest.addTarget(mPreviewSurface);
        previewRequest.addTarget(sharedOutputSurface1);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        // Wait to get some frames out
        waitForNumResults(resultListener, SOME_FRAMES);

        // Drain
        mSession.abortCaptures();
        sessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_READY,
                SESSION_CONFIGURE_TIMEOUT_MS);

        // Hide / unhide the SurfaceView to get a new target Surface
        recreatePreviewSurface();

        // And resize it again
        updatePreviewSurface(maxPreviewSize);

        // Create a second surface that targets the shared SurfaceTexture
        Surface sharedOutputSurface2 = new Surface(sharedOutputTexture);
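        // sharedOutputSurface2 is a distinct Surface object, but it is backed by the same
        // SurfaceTexture consumer as sharedOutputSurface1, exercising the "multiple Surfaces
        // pointing to the same consumer" case described in the test javadoc.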

        // Use the new Surfaces for a new session
        outputSurfaces.clear();
        outputSurfaces.add(mPreviewSurface);
        outputSurfaces.add(sharedOutputSurface2);

        sessionListener = new BlockingSessionCallback();

        mSession = configureCameraSession(mCamera, outputSurfaces, sessionListener, mHandler);

        previewRequest =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        previewRequest.addTarget(mPreviewSurface);
        previewRequest.addTarget(sharedOutputSurface2);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        // Wait to get some frames out
        waitForNumResults(resultListener, SOME_FRAMES);
    }

    /**
     * Verify creation of deferred surface capture sessions
     */
    @Test
    public void testDeferredSurfaces() throws Exception {
        for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
            try {
                StaticMetadata staticInfo = mAllStaticInfo.get(mCameraIdsUnderTest[i]);
                if (staticInfo.isHardwareLevelLegacy()) {
                    Log.i(TAG, "Camera " + mCameraIdsUnderTest[i] + " is legacy, skipping");
                    continue;
                }
                if (!staticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIdsUnderTest[i] +
                            " does not support color outputs, skipping");
                    continue;
                }

                openDevice(mCameraIdsUnderTest[i]);
                testDeferredSurfacesByCamera(mCameraIdsUnderTest[i]);
            } finally {
                closeDevice();
            }
        }
    }

    private void testDeferredSurfacesByCamera(String cameraId) throws Exception {
        Size maxPreviewSize = m1080pBoundedOrderedPreviewSizes.get(0);

        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

        // Create a SurfaceTexture for a second output
        SurfaceTexture sharedOutputTexture = new SurfaceTexture(/*random texture ID*/ 5);
        sharedOutputTexture.setDefaultBufferSize(maxPreviewSize.getWidth(),
                maxPreviewSize.getHeight());
        Surface sharedOutputSurface1 = new Surface(sharedOutputTexture);

        class TextureAvailableListener implements SurfaceTexture.OnFrameAvailableListener {
            @Override
            public void onFrameAvailable(SurfaceTexture t) {
                mGotFrame = true;
            }
            public boolean gotFrame() { return mGotFrame; }

            private volatile boolean mGotFrame = false;
        }
        TextureAvailableListener textureAvailableListener = new TextureAvailableListener();

        sharedOutputTexture.setOnFrameAvailableListener(textureAvailableListener, mHandler);

        updatePreviewSurface(maxPreviewSize);

        // Create deferred outputs for surface view and surface texture
        OutputConfiguration surfaceViewOutput = new OutputConfiguration(maxPreviewSize,
                SurfaceHolder.class);
        OutputConfiguration surfaceTextureOutput = new OutputConfiguration(maxPreviewSize,
                SurfaceTexture.class);

        List<OutputConfiguration> outputSurfaces = new ArrayList<>();
        outputSurfaces.add(surfaceViewOutput);
        outputSurfaces.add(surfaceTextureOutput);

        // Create non-deferred ImageReader output (JPEG for LIMITED-level compatibility)
        ImageDropperListener imageListener = new ImageDropperListener();
        createImageReader(mOrderedStillSizes.get(0), ImageFormat.JPEG, /*maxImages*/ 3,
                imageListener);
        OutputConfiguration jpegOutput =
                new OutputConfiguration(OutputConfiguration.SURFACE_GROUP_ID_NONE, mReaderSurface);
        outputSurfaces.add(jpegOutput);

        // Confirm that other surface types aren't supported for OutputConfiguration
        Class[] unsupportedClasses =
                {android.media.ImageReader.class, android.media.MediaCodec.class,
                 android.renderscript.Allocation.class, android.media.MediaRecorder.class};

        for (Class klass : unsupportedClasses) {
            try {
                OutputConfiguration bad = new OutputConfiguration(maxPreviewSize, klass);
                fail("OutputConfiguration allowed use of unsupported class " + klass);
            } catch (IllegalArgumentException e) {
                // expected
            }
        }

        // Confirm that zero surface size isn't supported for OutputConfiguration
        Size[] sizeZeros = { new Size(0, 0), new Size(1, 0), new Size(0, 1) };
        for (Size size : sizeZeros) {
            try {
                OutputConfiguration bad = new OutputConfiguration(size, SurfaceHolder.class);
                fail("OutputConfiguration allowed use of zero surfaceSize");
            } catch (IllegalArgumentException e) {
                // expected
            }
        }

        // Check whether session configuration is supported
        CameraTestUtils.checkSessionConfigurationSupported(mCamera, mHandler, outputSurfaces,
                /*inputConfig*/ null, SessionConfiguration.SESSION_REGULAR,
                /*defaultSupport*/ true, "Deferred session configuration query failed");

        // Create session

        BlockingSessionCallback sessionListener =
                new BlockingSessionCallback();

        mSession = configureCameraSessionWithConfig(mCamera, outputSurfaces, sessionListener,
                mHandler);
        sessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_READY,
                SESSION_CONFIGURE_TIMEOUT_MS);

        // Submit JPEG requests

        CaptureRequest.Builder request = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        request.addTarget(mReaderSurface);

        final int SOME_FRAMES = 10;
        for (int i = 0; i < SOME_FRAMES; i++) {
            mSession.capture(request.build(), resultListener, mHandler);
        }

        // Wait to get some frames out to ensure we can operate just the one expected surface
        waitForNumResults(resultListener, SOME_FRAMES);
        assertTrue("No images received", imageListener.getImageCount() > 0);

        // Ensure we can't use the deferred surfaces yet
        request.addTarget(sharedOutputSurface1);
        try {
            mSession.capture(request.build(), resultListener, mHandler);
            fail("Should have received IAE for trying to use a deferred target " +
                    "that's not yet configured");
        } catch (IllegalArgumentException e) {
            // expected
        }

        // Add deferred surfaces to their configurations
        surfaceViewOutput.addSurface(mPreviewSurface);
        surfaceTextureOutput.addSurface(sharedOutputSurface1);

        // Verify bad inputs to addSurface
        try {
            surfaceViewOutput.addSurface(null);
            fail("No error from setting a null deferred surface");
        } catch (NullPointerException e) {
            // expected
        }
        try {
            surfaceViewOutput.addSurface(mPreviewSurface);
            fail("Shouldn't be able to set deferred surface twice");
        } catch (IllegalStateException e) {
            // expected
        }

        // Add first deferred surface to session
        List<OutputConfiguration> deferredSurfaces = new ArrayList<>();
        deferredSurfaces.add(surfaceTextureOutput);

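        // finalizeOutputConfigurations() hands the now-complete deferred configurations back to
        // the session; only after this call can requests target the corresponding surfaces.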
        mSession.finalizeOutputConfigurations(deferredSurfaces);

        // Try a second time, this should error

        try {
            mSession.finalizeOutputConfigurations(deferredSurfaces);
            fail("Should have received IAE for trying to finish a deferred output twice");
        } catch (IllegalArgumentException e) {
            // expected
        }

        // Use new deferred surface for a bit
        imageListener.resetImageCount();
        for (int i = 0; i < SOME_FRAMES; i++) {
            mSession.capture(request.build(), resultListener, mHandler);
        }
        waitForNumResults(resultListener, SOME_FRAMES);
        assertTrue("No images received", imageListener.getImageCount() > 0);
        assertTrue("No texture update received", textureAvailableListener.gotFrame());

        // Ensure we can't use the last deferred surface yet
        request.addTarget(mPreviewSurface);
        try {
            mSession.capture(request.build(), resultListener, mHandler);
            fail("Should have received IAE for trying to use a deferred target that's" +
                    " not yet configured");
        } catch (IllegalArgumentException e) {
            // expected
        }

        // Add final deferred surface
        deferredSurfaces.clear();
        deferredSurfaces.add(surfaceViewOutput);

        mSession.finalizeOutputConfigurations(deferredSurfaces);

        // Use final deferred surface for a bit
        imageListener.resetImageCount();
        for (int i = 0; i < SOME_FRAMES; i++) {
            mSession.capture(request.build(), resultListener, mHandler);
        }
        waitForNumResults(resultListener, SOME_FRAMES);
        assertTrue("No images received", imageListener.getImageCount() > 0);
        // Can't check GL output since we don't have a context to call updateTexImage on, and
        // the callback only fires once per updateTexImage call.
        // And there's no way to verify data is going to a SurfaceView

        // Check for invalid output configurations being handed to a session
        OutputConfiguration badConfig =
                new OutputConfiguration(maxPreviewSize, SurfaceTexture.class);
        deferredSurfaces.clear();
        try {
            mSession.finalizeOutputConfigurations(deferredSurfaces);
            fail("No error for empty list passed to finalizeOutputConfigurations");
        } catch (IllegalArgumentException e) {
            // expected
        }

        deferredSurfaces.add(badConfig);
        try {
            mSession.finalizeOutputConfigurations(deferredSurfaces);
            fail("No error for invalid output config being passed to finalizeOutputConfigurations");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }

    /**
     * Measure the inter-frame interval based on SENSOR_TIMESTAMP for frameCount frames from the
     * provided capture listener. If prevTimestamp is positive, it is used for the first interval
     * calculation; otherwise, the first result is used to establish the starting time.
     *
     * Returns the mean interval in the first pair entry, and the largest interval in the second
     * pair entry.
     */
    Pair<Long, Long> measureMeanFrameInterval(SimpleCaptureCallback resultListener, int frameCount,
            long prevTimestamp) throws Exception {
        long summedIntervals = 0;
        long maxInterval = 0;
        int measurementCount = frameCount - ((prevTimestamp > 0) ? 0 : 1);
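        // Without a prior timestamp the first result only seeds the interval calculation, so one
        // fewer interval than frameCount is averaged.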

        for (int i = 0; i < frameCount; i++) {
            CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
            long timestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
            if (prevTimestamp > 0) {
                long interval = timestamp - prevTimestamp;
                if (interval > maxInterval) maxInterval = interval;
                summedIntervals += interval;
            }
            prevTimestamp = timestamp;
        }
        return new Pair<Long, Long>(summedIntervals / measurementCount, maxInterval);
    }

    /**
     * Test preview fps range for all supported ranges. The exposure time and frame duration are
     * validated.
     */
    private void previewFpsRangeTestByCamera() throws Exception {
        Size maxPreviewSz;
        Range<Integer>[] fpsRanges = getDescendingTargetFpsRanges(mStaticInfo);
        boolean antiBandingOffIsSupported = mStaticInfo.isAntiBandingOffModeSupported();
        Range<Integer> fpsRange;
        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

        for (int i = 0; i < fpsRanges.length; i += 1) {
            fpsRange = fpsRanges[i];
            if (mStaticInfo.isHardwareLevelLegacy()) {
                // Legacy devices don't report minimum frame duration for preview sizes. The FPS
                // range should be valid for any supported preview size.
                maxPreviewSz = mOrderedPreviewSizes.get(0);
            } else {
                maxPreviewSz = getMaxPreviewSizeForFpsRange(fpsRange);
            }

            requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
            // Turn off auto antibanding to avoid exposure time and frame duration interference
            // from antibanding algorithm.
            if (antiBandingOffIsSupported) {
                requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
                        CaptureRequest.CONTROL_AE_ANTIBANDING_MODE_OFF);
            } else {
                // The device doesn't implement the OFF mode, so the test continues; the camera
                // device must still make sure that the antibanding algorithm doesn't interfere
                // with the fps range control.
                Log.i(TAG, "OFF antibanding mode is not supported, the camera device output must" +
                        " satisfy the specified fps range regardless of its current antibanding" +
                        " mode");
            }

            startPreview(requestBuilder, maxPreviewSz, resultListener);
            resultListener = new SimpleCaptureCallback();
            mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);

            waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);

            verifyPreviewTargetFpsRange(resultListener, NUM_FRAMES_VERIFIED, fpsRange,
                    maxPreviewSz);
            stopPreview();
            resultListener.drain();
        }
    }

    private void verifyPreviewTargetFpsRange(SimpleCaptureCallback resultListener,
            int numFramesVerified, Range<Integer> fpsRange, Size previewSz) {
        CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
        List<Integer> capabilities = mStaticInfo.getAvailableCapabilitiesChecked();

        if (capabilities.contains(CaptureRequest.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
            long frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
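            // The target FPS range maps to a frame-duration window in nanoseconds:
            // duration = 1e9 / fps, so the upper FPS bound gives the minimum allowed duration and
            // the lower FPS bound gives the maximum.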
            long[] frameDurationRange =
                    new long[]{(long) (1e9 / fpsRange.getUpper()), (long) (1e9 / fpsRange.getLower())};
            mCollector.expectInRange(
                    "Frame duration must be in the range of " + Arrays.toString(frameDurationRange),
                    frameDuration, (long) (frameDurationRange[0] * (1 - FRAME_DURATION_ERROR_MARGIN)),
                    (long) (frameDurationRange[1] * (1 + FRAME_DURATION_ERROR_MARGIN)));
            long expTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
            mCollector.expectTrue(String.format("Exposure time %d must be no larger than frame"
                    + " duration %d", expTime, frameDuration), expTime <= frameDuration);

            Long minFrameDuration = mMinPreviewFrameDurationMap.get(previewSz);
            boolean findDuration = mCollector.expectTrue("Unable to find minFrameDuration for size "
                    + previewSz.toString(), minFrameDuration != null);
            if (findDuration) {
                mCollector.expectTrue("Frame duration " + frameDuration + " must be no smaller than"
                        + " minFrameDuration " + minFrameDuration, frameDuration >= minFrameDuration);
            }
        } else {
            Log.i(TAG, "verifyPreviewTargetFpsRange - MANUAL_SENSOR control is not supported," +
                    " skipping duration and exposure time check.");
        }
    }

    /**
     * Test all supported preview sizes for a camera device
     *
     * @throws Exception
     */
    private void previewTestByCamera() throws Exception {
        List<Size> previewSizes = getSupportedPreviewSizes(
                mCamera.getId(), mCameraManager, PREVIEW_SIZE_BOUND);

        for (final Size sz : previewSizes) {
            if (VERBOSE) {
                Log.v(TAG, "Testing camera preview size: " + sz.toString());
            }

            // TODO: vary the different settings like crop region to cover more cases.
            CaptureRequest.Builder requestBuilder =
                    mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            CaptureCallback mockCaptureCallback =
                    mock(CameraCaptureSession.CaptureCallback.class);

            startPreview(requestBuilder, sz, mockCaptureCallback);
            verifyCaptureResults(mSession, mockCaptureCallback, NUM_FRAMES_VERIFIED,
                    NUM_FRAMES_VERIFIED * FRAME_TIMEOUT_MS);
            stopPreview();
        }
    }

    private void previewTestPatternTestByCamera() throws Exception {
        Size maxPreviewSize = mOrderedPreviewSizes.get(0);
        int[] testPatternModes = mStaticInfo.getAvailableTestPatternModesChecked();
        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        CaptureCallback mockCaptureCallback;

        final int[] TEST_PATTERN_DATA = {0, 0xFFFFFFFF, 0xFFFFFFFF, 0}; // G:100%, RB:0.
        for (int mode : testPatternModes) {
            if (VERBOSE) {
                Log.v(TAG, "Test pattern mode: " + mode);
            }
            requestBuilder.set(CaptureRequest.SENSOR_TEST_PATTERN_MODE, mode);
            if (mode == CaptureRequest.SENSOR_TEST_PATTERN_MODE_SOLID_COLOR) {
                // Assign color pattern to SENSOR_TEST_PATTERN_DATA
                requestBuilder.set(CaptureRequest.SENSOR_TEST_PATTERN_DATA, TEST_PATTERN_DATA);
            }
            mockCaptureCallback = mock(CaptureCallback.class);
            startPreview(requestBuilder, maxPreviewSize, mockCaptureCallback);
            verifyCaptureResults(mSession, mockCaptureCallback, NUM_TEST_PATTERN_FRAMES_VERIFIED,
                    NUM_TEST_PATTERN_FRAMES_VERIFIED * FRAME_TIMEOUT_MS);
        }

        stopPreview();
    }

    private void surfaceSetTestByCamera(String cameraId) throws Exception {
        final int MAX_SURFACE_GROUP_ID = 10;
        Size maxPreviewSz = mOrderedPreviewSizes.get(0);
        Size yuvSizeBound = maxPreviewSz; // Default case: legacy device
        if (mStaticInfo.isHardwareLevelLimited()) {
            yuvSizeBound = mOrderedVideoSizes.get(0);
        } else if (mStaticInfo.isHardwareLevelAtLeastFull()) {
            yuvSizeBound = null;
        }
        Size maxYuvSize = getSupportedPreviewSizes(cameraId, mCameraManager, yuvSizeBound).get(0);

        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        ImageDropperListener imageListener = new ImageDropperListener();

        updatePreviewSurface(maxPreviewSz);
        createImageReader(maxYuvSize, ImageFormat.YUV_420_888, MAX_READER_IMAGES, imageListener);
        List<OutputConfiguration> outputConfigs = new ArrayList<OutputConfiguration>();
        OutputConfiguration previewConfig = new OutputConfiguration(mPreviewSurface);
        OutputConfiguration yuvConfig = new OutputConfiguration(mReaderSurface);
        assertEquals(OutputConfiguration.SURFACE_GROUP_ID_NONE, previewConfig.getSurfaceGroupId());
        assertEquals(OutputConfiguration.SURFACE_GROUP_ID_NONE, yuvConfig.getSurfaceGroupId());
        assertEquals(mPreviewSurface, previewConfig.getSurface());
        assertEquals(mReaderSurface, yuvConfig.getSurface());
        outputConfigs.add(previewConfig);
        outputConfigs.add(yuvConfig);
        requestBuilder.addTarget(mPreviewSurface);
        requestBuilder.addTarget(mReaderSurface);

        // Test different surface group IDs.
        for (int surfaceGroupId = OutputConfiguration.SURFACE_GROUP_ID_NONE;
                surfaceGroupId < MAX_SURFACE_GROUP_ID; surfaceGroupId++) {
            if (VERBOSE) {
                Log.v(TAG, "test preview with surface group id: " + surfaceGroupId);
            }

            previewConfig = new OutputConfiguration(surfaceGroupId, mPreviewSurface);
            yuvConfig = new OutputConfiguration(surfaceGroupId, mReaderSurface);
            outputConfigs.clear();
            outputConfigs.add(previewConfig);
            outputConfigs.add(yuvConfig);

            for (OutputConfiguration config : outputConfigs) {
                assertEquals(surfaceGroupId, config.getSurfaceGroupId());
            }

            CameraCaptureSession.StateCallback mockSessionListener =
                    mock(CameraCaptureSession.StateCallback.class);

            mSession = configureCameraSessionWithConfig(mCamera, outputConfigs,
                    mockSessionListener, mHandler);

            mSession.prepare(mPreviewSurface);
            verify(mockSessionListener,
                    timeout(PREPARE_TIMEOUT_MS).times(1)).
                    onSurfacePrepared(eq(mSession), eq(mPreviewSurface));

            mSession.prepare(mReaderSurface);
            verify(mockSessionListener,
                    timeout(PREPARE_TIMEOUT_MS).times(1)).
                    onSurfacePrepared(eq(mSession), eq(mReaderSurface));

            CaptureRequest request = requestBuilder.build();
            CaptureCallback mockCaptureCallback =
                    mock(CameraCaptureSession.CaptureCallback.class);
            mSession.setRepeatingRequest(request, mockCaptureCallback, mHandler);
            verifyCaptureResults(mSession, mockCaptureCallback, NUM_FRAMES_VERIFIED,
                    NUM_FRAMES_VERIFIED * FRAME_TIMEOUT_MS);
        }
    }

    private class IsCaptureResultValid implements ArgumentMatcher<TotalCaptureResult> {
        @Override
        public boolean matches(TotalCaptureResult result) {
            Long timeStamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
            return timeStamp != null && timeStamp.longValue() > 0L;
        }
    }

    private void verifyCaptureResults(
            CameraCaptureSession session,
            CaptureCallback mockListener,
            int expectResultCount,
            int timeOutMs) {
        // Should receive expected number of onCaptureStarted callbacks.
        ArgumentCaptor<Long> timestamps = ArgumentCaptor.forClass(Long.class);
        ArgumentCaptor<Long> frameNumbers = ArgumentCaptor.forClass(Long.class);
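        // The ArgumentCaptors record the timestamp and frame number arguments from every
        // onCaptureStarted() invocation, so the full sequences can be validated below.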
        verify(mockListener,
                timeout(timeOutMs).atLeast(expectResultCount))
                        .onCaptureStarted(
                                eq(session),
                                isA(CaptureRequest.class),
                                timestamps.capture(),
                                frameNumbers.capture());

        // Validate timestamps: all timestamps should be larger than 0 and monotonically increase.
        long timestamp = 0;
        for (Long nextTimestamp : timestamps.getAllValues()) {
            assertNotNull("Next timestamp is null!", nextTimestamp);
            assertTrue("Captures are out of order", timestamp < nextTimestamp);
            timestamp = nextTimestamp;
        }

        // Validate frame numbers: all frame numbers should be consecutive and positive.
        long frameNumber = -1;
        for (Long nextFrameNumber : frameNumbers.getAllValues()) {
            assertNotNull("Next frame number is null!", nextFrameNumber);
            assertTrue("Captures are out of order",
                    (frameNumber == -1) || (frameNumber + 1 == nextFrameNumber));
            frameNumber = nextFrameNumber;
        }

        // Should receive expected number of capture results.
        verify(mockListener,
                timeout(timeOutMs).atLeast(expectResultCount))
                        .onCaptureCompleted(
                                eq(session),
                                isA(CaptureRequest.class),
                                argThat(new IsCaptureResultValid()));

        // Should not receive any capture failed callbacks.
        verify(mockListener, never())
                        .onCaptureFailed(
                                eq(session),
                                isA(CaptureRequest.class),
                                isA(CaptureFailure.class));
    }

}