1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.cts;
18 
19 import static android.hardware.camera2.cts.CameraTestUtils.*;
20 
21 import android.graphics.ImageFormat;
22 import android.view.Surface;
23 
24 import com.android.ex.camera2.blocking.BlockingSessionCallback;
25 
26 import android.graphics.SurfaceTexture;
27 import android.hardware.camera2.CameraCaptureSession;
28 import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
29 import android.hardware.camera2.CameraDevice;
30 import android.hardware.camera2.CaptureFailure;
31 import android.hardware.camera2.CaptureRequest;
32 import android.hardware.camera2.CaptureResult;
33 import android.hardware.camera2.TotalCaptureResult;
34 import android.util.Size;
35 import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
36 import android.hardware.camera2.cts.helpers.StaticMetadata;
37 import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
38 import android.hardware.camera2.params.OutputConfiguration;
39 import android.hardware.camera2.params.SessionConfiguration;
40 import android.util.Log;
41 import android.util.Pair;
42 import android.util.Range;
43 import android.view.SurfaceView;
44 import android.view.SurfaceHolder;
45 
46 import org.mockito.ArgumentCaptor;
47 import org.mockito.ArgumentMatcher;
48 
49 import static org.mockito.Mockito.*;
50 
51 import java.util.ArrayList;
52 import java.util.Arrays;
53 import java.util.List;
54 
55 import org.junit.Test;
56 
57 /**
58  * CameraDevice preview test by using SurfaceView.
59  */
60 public class SurfaceViewPreviewTest extends Camera2SurfaceViewTestCase {
61     private static final String TAG = "SurfaceViewPreviewTest";
62     private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
63     private static final int FRAME_TIMEOUT_MS = 1000;
64     private static final int NUM_FRAMES_VERIFIED = 30;
65     private static final int NUM_TEST_PATTERN_FRAMES_VERIFIED = 60;
66     private static final float FRAME_DURATION_ERROR_MARGIN = 0.01f; // 1 percent error margin.
67     private static final int PREPARE_TIMEOUT_MS = 10000; // 10 s
68 
69     @Override
70     public void setUp() throws Exception {
71         super.setUp();
72     }
73 
74     @Override
75     public void tearDown() throws Exception {
76         super.tearDown();
77     }
78 
79     /**
80      * Test all supported preview sizes for each camera device.
81      * <p>
82      * For the first {@link #NUM_FRAMES_VERIFIED} capture results, the
83      * {@link CaptureCallback} callback availability and the monotonically
84      * increasing ordering of the capture timestamps are verified.
85      * </p>
86      */
87     @Test
88     public void testCameraPreview() throws Exception {
89         for (int i = 0; i < mCameraIds.length; i++) {
90             try {
91                 Log.i(TAG, "Testing preview for Camera " + mCameraIds[i]);
92                 if (!mAllStaticInfo.get(mCameraIds[i]).isColorOutputSupported()) {
93                     Log.i(TAG, "Camera " + mCameraIds[i] +
94                             " does not support color outputs, skipping");
95                     continue;
96                 }
97                 openDevice(mCameraIds[i]);
98                 previewTestByCamera();
99             } finally {
100                 closeDevice();
101             }
102         }
103     }
104 
105     /**
106      * Basic test pattern mode preview.
107      * <p>
108      * Only the test pattern preview and capture results are checked; the image
109      * buffers are not validated.
110      * </p>
111      */
112     @Test
113     public void testBasicTestPatternPreview() throws Exception {
114         for (int i = 0; i < mCameraIds.length; i++) {
115             try {
116                 Log.i(TAG, "Testing preview for Camera " + mCameraIds[i]);
117                 if (!mAllStaticInfo.get(mCameraIds[i]).isColorOutputSupported()) {
118                     Log.i(TAG, "Camera " + mCameraIds[i] +
119                             " does not support color outputs, skipping");
120                     continue;
121                 }
122                 openDevice(mCameraIds[i]);
123                 previewTestPatternTestByCamera();
124             } finally {
125                 closeDevice();
126             }
127         }
128     }
129 
130     /**
131      * Test {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE} for preview, validate the preview
132      * frame duration and exposure time.
133      */
134     @Test
135     public void testPreviewFpsRange() throws Exception {
136         for (String id : mCameraIds) {
137             try {
138                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
139                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
140                     continue;
141                 }
142                 openDevice(id);
143                 previewFpsRangeTestByCamera();
144             } finally {
145                 closeDevice();
146             }
147         }
148     }
149 
150     /**
151      * Test surface set streaming use cases.
152      *
153      * <p>
154      * The test sets output configuration with increasing surface set IDs for preview and YUV
155      * streams. The max supported preview size is selected for the preview stream, and the max
156      * supported YUV size (depending on the supported hardware level) is selected for the YUV
157      * stream. This test also exercises the prepare API.
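         * Surfaces that share the same non-negative surface group ID are hinted to the camera
         * device as outputs that will not stream simultaneously, so it may share internal
         * resources between them.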
158      * </p>
159      */
160     @Test
161     public void testSurfaceSet() throws Exception {
162         for (String id : mCameraIds) {
163             try {
164                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
165                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
166                     continue;
167                 }
168                 openDevice(id);
169                 surfaceSetTestByCamera(id);
170             } finally {
171                 closeDevice();
172             }
173         }
174     }
175 
176     /**
177      * Test to verify the {@link CameraCaptureSession#prepare} method works correctly, and has the
178      * expected effects on performance.
179      *
180      * - Ensure that prepare() results in onSurfacePrepared() being invoked
181      * - Ensure that prepare() does not cause preview glitches while operating
182      * - Ensure that starting to use a newly-prepared output does not cause additional
183      *   preview glitches to occur
184      */
185     @Test
186     public void testPreparePerformance() throws Throwable {
187         for (int i = 0; i < mCameraIds.length; i++) {
188             try {
189                 if (!mAllStaticInfo.get(mCameraIds[i]).isColorOutputSupported()) {
190                     Log.i(TAG, "Camera " + mCameraIds[i] +
191                             " does not support color outputs, skipping");
192                     continue;
193                 }
194                 openDevice(mCameraIds[i]);
195                 preparePerformanceTestByCamera(mCameraIds[i]);
196             }
197             finally {
198                 closeDevice();
199             }
200         }
201     }
202 
203     private void preparePerformanceTestByCamera(String cameraId) throws Exception {
204         final int MAX_IMAGES_TO_PREPARE = 10;
205         final int UNKNOWN_LATENCY_RESULT_WAIT = 5;
206         final int MAX_RESULTS_TO_WAIT = 10;
207         final int FRAMES_FOR_AVERAGING = 100;
208         final float PREPARE_FRAME_RATE_BOUNDS = 0.05f; // fraction allowed difference
209         final float PREPARE_PEAK_RATE_BOUNDS = 0.5f; // fraction allowed difference
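            // For example, with a ~33 ms preview frame interval these bounds allow an average
            // interval of roughly 35 ms and a single peak interval of roughly 50 ms during prepare.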
210 
211         Size maxYuvSize = getSupportedPreviewSizes(cameraId, mCameraManager, null).get(0);
212         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
213 
214         // Don't need image data, just drop it right away to minimize overhead
215         ImageDropperListener imageListener = new ImageDropperListener();
216 
217         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
218 
219         CaptureRequest.Builder previewRequest =
220                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
221 
222         // Configure outputs and session
223 
224         updatePreviewSurface(maxPreviewSize);
225 
226         createImageReader(maxYuvSize, ImageFormat.YUV_420_888, MAX_IMAGES_TO_PREPARE, imageListener);
227 
228         List<Surface> outputSurfaces = new ArrayList<Surface>();
229         outputSurfaces.add(mPreviewSurface);
230         outputSurfaces.add(mReaderSurface);
231 
232         CameraCaptureSession.StateCallback mockSessionListener =
233                 mock(CameraCaptureSession.StateCallback.class);
234 
235         mSession = configureCameraSession(mCamera, outputSurfaces, mockSessionListener, mHandler);
236 
237         previewRequest.addTarget(mPreviewSurface);
238         Range<Integer> maxFpsTarget = mStaticInfo.getAeMaxTargetFpsRange();
239         previewRequest.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, maxFpsTarget);
240 
241         mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
242 
243         // Converge AE
244         waitForAeStable(resultListener, UNKNOWN_LATENCY_RESULT_WAIT);
245 
246         if (mStaticInfo.isAeLockSupported()) {
247             // Lock AE if possible to improve stability
248             previewRequest.set(CaptureRequest.CONTROL_AE_LOCK, true);
249             mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
250             if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
251                 // Legacy mode doesn't output AE state
252                 waitForResultValue(resultListener, CaptureResult.CONTROL_AE_STATE,
253                         CaptureResult.CONTROL_AE_STATE_LOCKED, MAX_RESULTS_TO_WAIT);
254             }
255         }
256 
257         // Measure frame rate for a bit
258         Pair<Long, Long> frameDurationStats =
259                 measureMeanFrameInterval(resultListener, FRAMES_FOR_AVERAGING, /*prevTimestamp*/ 0);
260 
261         Log.i(TAG, String.format("Frame interval avg during normal preview: %f ms, peak %f ms",
262                         frameDurationStats.first / 1e6, frameDurationStats.second / 1e6));
263 
264         // Drain results, do prepare
265         resultListener.drain();
266 
267         mSession.prepare(mReaderSurface);
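            // prepare() is expected to pre-allocate buffers for the reader surface in the
            // background; the onSurfacePrepared callback signals when the surface can be used.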
268 
269         verify(mockSessionListener,
270                 timeout(PREPARE_TIMEOUT_MS).times(1)).
271                 onSurfacePrepared(eq(mSession), eq(mReaderSurface));
272 
273         // Calculate frame rate during prepare
274 
275         int resultsReceived = (int) resultListener.getTotalNumFrames();
276         if (resultsReceived > 2) {
277             // Only verify frame rate if there are a couple of results
278             Pair<Long, Long> whilePreparingFrameDurationStats =
279                     measureMeanFrameInterval(resultListener, resultsReceived, /*prevTimestamp*/ 0);
280 
281             Log.i(TAG, String.format("Frame interval during prepare avg: %f ms, peak %f ms",
282                             whilePreparingFrameDurationStats.first / 1e6,
283                             whilePreparingFrameDurationStats.second / 1e6));
284 
285             if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
286                 mCollector.expectTrue(
287                     String.format("Camera %s: Preview peak frame interval affected by prepare " +
288                             "call: preview avg frame duration: %f ms, peak during prepare: %f ms",
289                             cameraId,
290                             frameDurationStats.first / 1e6,
291                             whilePreparingFrameDurationStats.second / 1e6),
292                     (whilePreparingFrameDurationStats.second <=
293                             frameDurationStats.first * (1 + PREPARE_PEAK_RATE_BOUNDS)));
294                 mCollector.expectTrue(
295                     String.format("Camera %s: Preview average frame interval affected by prepare " +
296                             "call: preview avg frame duration: %f ms, during prepare: %f ms",
297                             cameraId,
298                             frameDurationStats.first / 1e6,
299                             whilePreparingFrameDurationStats.first / 1e6),
300                     (whilePreparingFrameDurationStats.first <=
301                             frameDurationStats.first * (1 + PREPARE_FRAME_RATE_BOUNDS)));
302             }
303         }
304 
305         resultListener.drain();
306 
307         // Get at least one more preview result without prepared target
308         CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
309         long prevTimestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
310 
311         // Now use the prepared stream and ensure there are no hiccups from using it
312         previewRequest.addTarget(mReaderSurface);
313 
314         mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
315 
316         Pair<Long, Long> preparedFrameDurationStats =
317                 measureMeanFrameInterval(resultListener, MAX_IMAGES_TO_PREPARE*2, prevTimestamp);
318 
319         Log.i(TAG, String.format("Frame interval with prepared stream added avg: %f ms, peak %f ms",
320                         preparedFrameDurationStats.first / 1e6,
321                         preparedFrameDurationStats.second / 1e6));
322 
323         if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
324             mCollector.expectTrue(
325                 String.format("Camera %s: Preview peak frame interval affected by use of new " +
326                         "stream: preview avg frame duration: %f ms, peak with new stream: %f ms",
327                         cameraId,
328                         frameDurationStats.first / 1e6, preparedFrameDurationStats.second / 1e6),
329                 (preparedFrameDurationStats.second <=
330                         frameDurationStats.first * (1 + PREPARE_PEAK_RATE_BOUNDS)));
331             mCollector.expectTrue(
332                 String.format("Camera %s: Preview average frame interval affected by use of new " +
333                         "stream: preview avg frame duration: %f ms, with new stream: %f ms",
334                         cameraId,
335                         frameDurationStats.first / 1e6, preparedFrameDurationStats.first / 1e6),
336                 (preparedFrameDurationStats.first <=
337                         frameDurationStats.first * (1 + PREPARE_FRAME_RATE_BOUNDS)));
338         }
339     }
340 
341     /**
342      * Test to verify correct behavior with the same Surface object being used repeatedly with
343      * different native internals, and multiple Surfaces pointing to the same actual consumer object.
344      */
345     @Test
346     public void testSurfaceEquality() throws Exception {
347         for (int i = 0; i < mCameraIds.length; i++) {
348             try {
349                 if (!mAllStaticInfo.get(mCameraIds[i]).isColorOutputSupported()) {
350                     Log.i(TAG, "Camera " + mCameraIds[i] +
351                             " does not support color outputs, skipping");
352                     continue;
353                 }
354                 openDevice(mCameraIds[i]);
355                 surfaceEqualityTestByCamera(mCameraIds[i]);
356             }
357             finally {
358                 closeDevice();
359             }
360         }
361     }
362 
363     private void surfaceEqualityTestByCamera(String cameraId) throws Exception {
364         final int SOME_FRAMES = 10;
365 
366         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
367 
368         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
369 
370         // Create a SurfaceTexture for a second output
371         SurfaceTexture sharedOutputTexture = new SurfaceTexture(/*random texture ID*/ 5);
372         sharedOutputTexture.setDefaultBufferSize(maxPreviewSize.getWidth(),
373                 maxPreviewSize.getHeight());
374         Surface sharedOutputSurface1 = new Surface(sharedOutputTexture);
375 
376         updatePreviewSurface(maxPreviewSize);
377 
378         List<Surface> outputSurfaces = new ArrayList<Surface>();
379         outputSurfaces.add(mPreviewSurface);
380         outputSurfaces.add(sharedOutputSurface1);
381 
382         BlockingSessionCallback sessionListener =
383                 new BlockingSessionCallback();
384 
385         mSession = configureCameraSession(mCamera, outputSurfaces, sessionListener, mHandler);
386         sessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_READY,
387                 SESSION_CONFIGURE_TIMEOUT_MS);
388 
389         CaptureRequest.Builder previewRequest =
390                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
391         previewRequest.addTarget(mPreviewSurface);
392         previewRequest.addTarget(sharedOutputSurface1);
393 
394         mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
395 
396         // Wait to get some frames out
397         waitForNumResults(resultListener, SOME_FRAMES);
398 
399         // Drain
400         mSession.abortCaptures();
401         sessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_READY,
402                 SESSION_CONFIGURE_TIMEOUT_MS);
403 
404         // Hide / unhide the SurfaceView to get a new target Surface
405         recreatePreviewSurface();
406 
407         // And resize it again
408         updatePreviewSurface(maxPreviewSize);
409 
410         // Create a second surface that targets the shared SurfaceTexture
411         Surface sharedOutputSurface2 = new Surface(sharedOutputTexture);
412 
413         // Use the new Surfaces for a new session
414         outputSurfaces.clear();
415         outputSurfaces.add(mPreviewSurface);
416         outputSurfaces.add(sharedOutputSurface2);
417 
418         sessionListener = new BlockingSessionCallback();
419 
420         mSession = configureCameraSession(mCamera, outputSurfaces, sessionListener, mHandler);
421 
422         previewRequest =
423                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
424         previewRequest.addTarget(mPreviewSurface);
425         previewRequest.addTarget(sharedOutputSurface2);
426 
427         mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
428 
429         // Wait to get some frames out
430         waitForNumResults(resultListener, SOME_FRAMES);
431     }
432 
433     /**
434      * Verify creation of deferred surface capture sessions
435      */
436     @Test
437     public void testDeferredSurfaces() throws Exception {
438         for (int i = 0; i < mCameraIds.length; i++) {
439             try {
440                 StaticMetadata staticInfo = mAllStaticInfo.get(mCameraIds[i]);
441                 if (staticInfo.isHardwareLevelLegacy()) {
442                     Log.i(TAG, "Camera " + mCameraIds[i] + " is legacy, skipping");
443                     continue;
444                 }
445                 if (!staticInfo.isColorOutputSupported()) {
446                     Log.i(TAG, "Camera " + mCameraIds[i] +
447                             " does not support color outputs, skipping");
448                     continue;
449                 }
450 
451                 openDevice(mCameraIds[i]);
452                 testDeferredSurfacesByCamera(mCameraIds[i]);
453             }
454             finally {
455                 closeDevice();
456             }
457         }
458     }
459 
460     private void testDeferredSurfacesByCamera(String cameraId) throws Exception {
461         Size maxPreviewSize = m1080pBoundedOrderedPreviewSizes.get(0);
462 
463         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
464 
465         // Create a SurfaceTexture for a second output
466         SurfaceTexture sharedOutputTexture = new SurfaceTexture(/*random texture ID*/ 5);
467         sharedOutputTexture.setDefaultBufferSize(maxPreviewSize.getWidth(),
468                 maxPreviewSize.getHeight());
469         Surface sharedOutputSurface1 = new Surface(sharedOutputTexture);
470 
471         class TextureAvailableListener implements SurfaceTexture.OnFrameAvailableListener {
472             @Override
473             public void onFrameAvailable(SurfaceTexture t) {
474                 mGotFrame = true;
475             }
476             public boolean gotFrame() { return mGotFrame; }
477 
478             private volatile boolean mGotFrame = false;
479         }
480         TextureAvailableListener textureAvailableListener = new TextureAvailableListener();
481 
482         sharedOutputTexture.setOnFrameAvailableListener(textureAvailableListener, mHandler);
483 
484         updatePreviewSurface(maxPreviewSize);
485 
486         // Create deferred outputs for surface view and surface texture
487         OutputConfiguration surfaceViewOutput = new OutputConfiguration(maxPreviewSize,
488                 SurfaceHolder.class);
489         OutputConfiguration surfaceTextureOutput = new OutputConfiguration(maxPreviewSize,
490                 SurfaceTexture.class);
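            // These deferred configurations carry only an output size and a consumer class; the
            // actual Surfaces are attached later via addSurface() and finalizeOutputConfigurations().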
491 
492         List<OutputConfiguration> outputSurfaces = new ArrayList<>();
493         outputSurfaces.add(surfaceViewOutput);
494         outputSurfaces.add(surfaceTextureOutput);
495 
496         // Create non-deferred ImageReader output (JPEG for LIMITED-level compatibility)
497         ImageDropperListener imageListener = new ImageDropperListener();
498         createImageReader(mOrderedStillSizes.get(0), ImageFormat.JPEG, /*maxImages*/ 3,
499                 imageListener);
500         OutputConfiguration jpegOutput =
501                 new OutputConfiguration(OutputConfiguration.SURFACE_GROUP_ID_NONE, mReaderSurface);
502         outputSurfaces.add(jpegOutput);
503 
504         // Confirm that other surface types aren't supported for OutputConfiguration
505         Class[] unsupportedClasses =
506                 {android.media.ImageReader.class, android.media.MediaCodec.class,
507                  android.renderscript.Allocation.class, android.media.MediaRecorder.class};
508 
509         for (Class klass : unsupportedClasses) {
510             try {
511                 OutputConfiguration bad = new OutputConfiguration(maxPreviewSize, klass);
512                 fail("OutputConfiguration allowed use of unsupported class " + klass);
513             } catch (IllegalArgumentException e) {
514                 // expected
515             }
516         }
517 
518         // Confirm that zero surface size isn't supported for OutputConfiguration
519         Size[] sizeZeros = { new Size(0, 0), new Size(1, 0), new Size(0, 1) };
520         for (Size size : sizeZeros) {
521             try {
522                 OutputConfiguration bad = new OutputConfiguration(size, SurfaceHolder.class);
523                 fail("OutputConfiguration allowed use of zero surfaceSize");
524             } catch (IllegalArgumentException e) {
525                 //expected
526             }
527         }
528 
529         // Check whether session configuration is supported
530         CameraTestUtils.checkSessionConfigurationSupported(mCamera, mHandler, outputSurfaces,
531                 /*inputConfig*/ null, SessionConfiguration.SESSION_REGULAR,
532                 /*defaultSupport*/ true, "Deferred session configuration query failed");
533 
534         // Create session
535 
536         BlockingSessionCallback sessionListener =
537                 new BlockingSessionCallback();
538 
539         mSession = configureCameraSessionWithConfig(mCamera, outputSurfaces, sessionListener,
540                 mHandler);
541         sessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_READY,
542                 SESSION_CONFIGURE_TIMEOUT_MS);
543 
544         // Submit JPEG requests
545 
546         CaptureRequest.Builder request = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
547         request.addTarget(mReaderSurface);
548 
549         final int SOME_FRAMES = 10;
550         for (int i = 0; i < SOME_FRAMES; i++) {
551             mSession.capture(request.build(), resultListener, mHandler);
552         }
553 
554         // Wait to get some frames out to ensure we can operate just the one expected surface
555         waitForNumResults(resultListener, SOME_FRAMES);
556         assertTrue("No images received", imageListener.getImageCount() > 0);
557 
558         // Ensure we can't use the deferred surfaces yet
559         request.addTarget(sharedOutputSurface1);
560         try {
561             mSession.capture(request.build(), resultListener, mHandler);
562             fail("Should have received IAE for trying to use a deferred target " +
563                     "that's not yet configured");
564         } catch (IllegalArgumentException e) {
565             // expected
566         }
567 
568         // Add deferred surfaces to their configurations
569         surfaceViewOutput.addSurface(mPreviewSurface);
570         surfaceTextureOutput.addSurface(sharedOutputSurface1);
571 
572         // Verify bad inputs to addSurface
573         try {
574             surfaceViewOutput.addSurface(null);
575             fail("No error from setting a null deferred surface");
576         } catch (NullPointerException e) {
577             // expected
578         }
579         try {
580             surfaceViewOutput.addSurface(mPreviewSurface);
581             fail("Shouldn't be able to set deferred surface twice");
582         } catch (IllegalStateException e) {
583             // expected
584         }
585 
586         // Add first deferred surface to session
587         List<OutputConfiguration> deferredSurfaces = new ArrayList<>();
588         deferredSurfaces.add(surfaceTextureOutput);
589 
590         mSession.finalizeOutputConfigurations(deferredSurfaces);
591 
592         // Try a second time, this should error
593 
594         try {
595             mSession.finalizeOutputConfigurations(deferredSurfaces);
596             fail("Should have received IAE for trying to finalize a deferred output twice");
597         } catch (IllegalArgumentException e) {
598             // expected
599         }
600 
601         // Use new deferred surface for a bit
602         imageListener.resetImageCount();
603         for (int i = 0; i < SOME_FRAMES; i++) {
604             mSession.capture(request.build(), resultListener, mHandler);
605         }
606         waitForNumResults(resultListener, SOME_FRAMES);
607         assertTrue("No images received", imageListener.getImageCount() > 0);
608         assertTrue("No texture update received", textureAvailableListener.gotFrame());
609 
610         // Ensure we can't use the last deferred surface yet
611         request.addTarget(mPreviewSurface);
612         try {
613             mSession.capture(request.build(), resultListener, mHandler);
614             fail("Should have received IAE for trying to use a deferred target that's" +
615                     " not yet configured");
616         } catch (IllegalArgumentException e) {
617             // expected
618         }
619 
620         // Add final deferred surface
621         deferredSurfaces.clear();
622         deferredSurfaces.add(surfaceViewOutput);
623 
624         mSession.finalizeOutputConfigurations(deferredSurfaces);
625 
626         // Use final deferred surface for a bit
627         imageListener.resetImageCount();
628         for (int i = 0; i < SOME_FRAMES; i++) {
629             mSession.capture(request.build(), resultListener, mHandler);
630         }
631         waitForNumResults(resultListener, SOME_FRAMES);
632         assertTrue("No images received", imageListener.getImageCount() > 0);
633         // Can't check GL output since we don't have a context to call updateTexImage on, and
634         // the callback only fires once per updateTexImage call.
635         // And there's no way to verify data is going to a SurfaceView
636 
637         // Check for invalid output configurations being handed to a session
638         OutputConfiguration badConfig =
639                 new OutputConfiguration(maxPreviewSize, SurfaceTexture.class);
640         deferredSurfaces.clear();
641         try {
642             mSession.finalizeOutputConfigurations(deferredSurfaces);
643             fail("No error for empty list passed to finalizeOutputConfigurations");
644         } catch (IllegalArgumentException e) {
645             // expected
646         }
647 
648         deferredSurfaces.add(badConfig);
649         try {
650             mSession.finalizeOutputConfigurations(deferredSurfaces);
651             fail("No error for invalid output config being passed to finalizeOutputConfigurations");
652         } catch (IllegalArgumentException e) {
653             // expected
654         }
655 
656     }
657 
658     /**
659      * Measure the inter-frame interval based on SENSOR_TIMESTAMP for frameCount frames from the
660      * provided capture listener.  If prevTimestamp is positive, it is used for the first interval
661      * calculation; otherwise, the first result is used to establish the starting time.
662      *
663      * Returns the mean interval in the first pair entry, and the largest interval in the second
664      * pair entry.
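         * For example, a steady 30 fps preview corresponds to a mean interval of roughly 33.3 ms
         * (about 33,333,333 ns).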
665      */
666     Pair<Long, Long> measureMeanFrameInterval(SimpleCaptureCallback resultListener, int frameCount,
667             long prevTimestamp) throws Exception {
668         long summedIntervals = 0;
669         long maxInterval = 0;
670         int measurementCount = frameCount - ((prevTimestamp > 0) ? 0 : 1);
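            // When no prior timestamp is supplied, the first result only establishes the baseline,
            // so one fewer interval than frameCount is measured.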
671 
672         for (int i = 0; i < frameCount; i++) {
673             CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
674             long timestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
675             if (prevTimestamp > 0) {
676                 long interval = timestamp - prevTimestamp;
677                 if (interval > maxInterval) maxInterval = interval;
678                 summedIntervals += interval;
679             }
680             prevTimestamp = timestamp;
681         }
682         return new Pair<Long, Long>(summedIntervals / measurementCount, maxInterval);
683     }
684 
685 
686     /**
687      * Test preview fps range for all supported ranges. The exposure time and frame duration are
688      * validated.
689      */
690     private void previewFpsRangeTestByCamera() throws Exception {
691         Size maxPreviewSz;
692         Range<Integer>[] fpsRanges = getDescendingTargetFpsRanges(mStaticInfo);
693         boolean antiBandingOffIsSupported = mStaticInfo.isAntiBandingOffModeSupported();
694         Range<Integer> fpsRange;
695         CaptureRequest.Builder requestBuilder =
696                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
697         SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
698 
699         for (int i = 0; i < fpsRanges.length; i += 1) {
700             fpsRange = fpsRanges[i];
701             if (mStaticInfo.isHardwareLevelLegacy()) {
702                 // Legacy devices don't report minimum frame duration for preview sizes. The FPS
703                 // range should be valid for any supported preview size.
704                 maxPreviewSz = mOrderedPreviewSizes.get(0);
705             } else {
706                 maxPreviewSz = getMaxPreviewSizeForFpsRange(fpsRange);
707             }
708 
709             requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
710             // Turn off auto antibanding to avoid exposure time and frame duration interference
711             // from antibanding algorithm.
712             if (antiBandingOffIsSupported) {
713                 requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
714                         CaptureRequest.CONTROL_AE_ANTIBANDING_MODE_OFF);
715             } else {
716                 // The device doesn't implement the OFF mode, test continues. It need make sure
717                 // The device doesn't implement the OFF mode, so the test continues; the device must
718                 // ensure that the antibanding algorithm doesn't interfere with the fps range control.
719                         " satisfy the specified fps range regardless of its current antibanding" +
720                         " mode");
721             }
722 
723             startPreview(requestBuilder, maxPreviewSz, resultListener);
724             resultListener = new SimpleCaptureCallback();
725             mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);
726 
727             waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
728 
729             verifyPreviewTargetFpsRange(resultListener, NUM_FRAMES_VERIFIED, fpsRange,
730                     maxPreviewSz);
731             stopPreview();
732             resultListener.drain();
733         }
734     }
735 
736     private void verifyPreviewTargetFpsRange(SimpleCaptureCallback resultListener,
737             int numFramesVerified, Range<Integer> fpsRange, Size previewSz) {
738         CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
739         List<Integer> capabilities = mStaticInfo.getAvailableCapabilitiesChecked();
740 
741         if (capabilities.contains(CaptureRequest.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
742             long frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
743             long[] frameDurationRange =
744                     new long[]{(long) (1e9 / fpsRange.getUpper()), (long) (1e9 / fpsRange.getLower())};
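                // For example, a [15, 30] fps range corresponds to frame durations between about
                // 33.3 ms (1e9 / 30) and 66.7 ms (1e9 / 15).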
745             mCollector.expectInRange(
746                     "Frame duration must be in the range of " + Arrays.toString(frameDurationRange),
747                     frameDuration, (long) (frameDurationRange[0] * (1 - FRAME_DURATION_ERROR_MARGIN)),
748                     (long) (frameDurationRange[1] * (1 + FRAME_DURATION_ERROR_MARGIN)));
749             long expTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
750             mCollector.expectTrue(String.format("Exposure time %d must be no larger than frame"
751                     + " duration %d", expTime, frameDuration), expTime <= frameDuration);
752 
753             Long minFrameDuration = mMinPreviewFrameDurationMap.get(previewSz);
754             boolean findDuration = mCollector.expectTrue("Unable to find minFrameDuration for size "
755                     + previewSz.toString(), minFrameDuration != null);
756             if (findDuration) {
757                 mCollector.expectTrue("Frame duration " + frameDuration + " must be no smaller than"
758                         + " minFrameDuration " + minFrameDuration, frameDuration >= minFrameDuration);
759             }
760         } else {
761             Log.i(TAG, "verifyPreviewTargetFpsRange - MANUAL_SENSOR control is not supported," +
762                     " skipping duration and exposure time check.");
763         }
764     }
765 
766     /**
767      * Test all supported preview sizes for a camera device
768      *
769      * @throws Exception
770      */
771     private void previewTestByCamera() throws Exception {
772         List<Size> previewSizes = getSupportedPreviewSizes(
773                 mCamera.getId(), mCameraManager, PREVIEW_SIZE_BOUND);
774 
775         for (final Size sz : previewSizes) {
776             if (VERBOSE) {
777                 Log.v(TAG, "Testing camera preview size: " + sz.toString());
778             }
779 
780             // TODO: vary the different settings like crop region to cover more cases.
781             CaptureRequest.Builder requestBuilder =
782                     mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
783             CaptureCallback mockCaptureCallback =
784                     mock(CameraCaptureSession.CaptureCallback.class);
785 
786             startPreview(requestBuilder, sz, mockCaptureCallback);
787             verifyCaptureResults(mSession, mockCaptureCallback, NUM_FRAMES_VERIFIED,
788                     NUM_FRAMES_VERIFIED * FRAME_TIMEOUT_MS);
789             stopPreview();
790         }
791     }
792 
793     private void previewTestPatternTestByCamera() throws Exception {
794         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
795         int[] testPatternModes = mStaticInfo.getAvailableTestPatternModesChecked();
796         CaptureRequest.Builder requestBuilder =
797                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
798         CaptureCallback mockCaptureCallback;
799 
800         final int[] TEST_PATTERN_DATA = {0, 0xFFFFFFFF, 0xFFFFFFFF, 0}; // G:100%, RB:0.
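            // The four entries are the raw [R, Gr, Gb, B] values used by SOLID_COLOR mode, so this
            // pattern should render as full-scale green with red and blue at zero.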
801         for (int mode : testPatternModes) {
802             if (VERBOSE) {
803                 Log.v(TAG, "Test pattern mode: " + mode);
804             }
805             requestBuilder.set(CaptureRequest.SENSOR_TEST_PATTERN_MODE, mode);
806             if (mode == CaptureRequest.SENSOR_TEST_PATTERN_MODE_SOLID_COLOR) {
807                 // Assign color pattern to SENSOR_TEST_PATTERN_MODE_DATA
808                 requestBuilder.set(CaptureRequest.SENSOR_TEST_PATTERN_DATA, TEST_PATTERN_DATA);
809             }
810             mockCaptureCallback = mock(CaptureCallback.class);
811             startPreview(requestBuilder, maxPreviewSize, mockCaptureCallback);
812             verifyCaptureResults(mSession, mockCaptureCallback, NUM_TEST_PATTERN_FRAMES_VERIFIED,
813                     NUM_TEST_PATTERN_FRAMES_VERIFIED * FRAME_TIMEOUT_MS);
814         }
815 
816         stopPreview();
817     }
818 
819     private void surfaceSetTestByCamera(String cameraId) throws Exception {
820         final int MAX_SURFACE_GROUP_ID = 10;
821         Size maxPreviewSz = mOrderedPreviewSizes.get(0);
822         Size yuvSizeBound = maxPreviewSz; // Default case: legacy device
823         if (mStaticInfo.isHardwareLevelLimited()) {
824             yuvSizeBound = mOrderedVideoSizes.get(0);
825         } else if (mStaticInfo.isHardwareLevelAtLeastFull()) {
826             yuvSizeBound = null;
827         }
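            // FULL and better devices are expected to support a maximum-size YUV stream alongside
            // the preview, while LIMITED devices are bounded by the maximum recording size.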
828         Size maxYuvSize = getSupportedPreviewSizes(cameraId, mCameraManager, yuvSizeBound).get(0);
829 
830         CaptureRequest.Builder requestBuilder =
831                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
832         ImageDropperListener imageListener = new ImageDropperListener();
833 
834         updatePreviewSurface(maxPreviewSz);
835         createImageReader(maxYuvSize, ImageFormat.YUV_420_888, MAX_READER_IMAGES, imageListener);
836         List<OutputConfiguration> outputConfigs = new ArrayList<OutputConfiguration>();
837         OutputConfiguration previewConfig = new OutputConfiguration(mPreviewSurface);
838         OutputConfiguration yuvConfig = new OutputConfiguration(mReaderSurface);
839         assertEquals(OutputConfiguration.SURFACE_GROUP_ID_NONE, previewConfig.getSurfaceGroupId());
840         assertEquals(OutputConfiguration.SURFACE_GROUP_ID_NONE, yuvConfig.getSurfaceGroupId());
841         assertEquals(mPreviewSurface, previewConfig.getSurface());
842         assertEquals(mReaderSurface, yuvConfig.getSurface());
843         outputConfigs.add(previewConfig);
844         outputConfigs.add(yuvConfig);
845         requestBuilder.addTarget(mPreviewSurface);
846         requestBuilder.addTarget(mReaderSurface);
847 
848         // Test different stream set ID.
849         for (int surfaceGroupId = OutputConfiguration.SURFACE_GROUP_ID_NONE;
850                 surfaceGroupId < MAX_SURFACE_GROUP_ID; surfaceGroupId++) {
851             if (VERBOSE) {
852                 Log.v(TAG, "Test preview with surface group id: " + surfaceGroupId);
853             }
854 
855             previewConfig = new OutputConfiguration(surfaceGroupId, mPreviewSurface);
856             yuvConfig = new OutputConfiguration(surfaceGroupId, mReaderSurface);
857             outputConfigs.clear();
858             outputConfigs.add(previewConfig);
859             outputConfigs.add(yuvConfig);
860 
861             for (OutputConfiguration config : outputConfigs) {
862                 assertEquals(surfaceGroupId, config.getSurfaceGroupId());
863             }
864 
865             CameraCaptureSession.StateCallback mockSessionListener =
866                     mock(CameraCaptureSession.StateCallback.class);
867 
868             mSession = configureCameraSessionWithConfig(mCamera, outputConfigs,
869                     mockSessionListener, mHandler);
870 
871 
872             mSession.prepare(mPreviewSurface);
873             verify(mockSessionListener,
874                     timeout(PREPARE_TIMEOUT_MS).times(1)).
875                     onSurfacePrepared(eq(mSession), eq(mPreviewSurface));
876 
877             mSession.prepare(mReaderSurface);
878             verify(mockSessionListener,
879                     timeout(PREPARE_TIMEOUT_MS).times(1)).
880                     onSurfacePrepared(eq(mSession), eq(mReaderSurface));
881 
882             CaptureRequest request = requestBuilder.build();
883             CaptureCallback mockCaptureCallback =
884                     mock(CameraCaptureSession.CaptureCallback.class);
885             mSession.setRepeatingRequest(request, mockCaptureCallback, mHandler);
886             verifyCaptureResults(mSession, mockCaptureCallback, NUM_FRAMES_VERIFIED,
887                     NUM_FRAMES_VERIFIED * FRAME_TIMEOUT_MS);
888         }
889     }
890 
891     private class IsCaptureResultValid implements ArgumentMatcher<TotalCaptureResult> {
892         @Override
893         public boolean matches(TotalCaptureResult obj) {
894             TotalCaptureResult result = obj;
895             Long timeStamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
896             if (timeStamp != null && timeStamp.longValue() > 0L) {
897                 return true;
898             }
899             return false;
900         }
901     }
902 
903     private void verifyCaptureResults(
904             CameraCaptureSession session,
905             CaptureCallback mockListener,
906             int expectResultCount,
907             int timeOutMs) {
908         // Should receive expected number of onCaptureStarted callbacks.
909         ArgumentCaptor<Long> timestamps = ArgumentCaptor.forClass(Long.class);
910         ArgumentCaptor<Long> frameNumbers = ArgumentCaptor.forClass(Long.class);
911         verify(mockListener,
912                 timeout(timeOutMs).atLeast(expectResultCount))
913                         .onCaptureStarted(
914                                 eq(session),
915                                 isA(CaptureRequest.class),
916                                 timestamps.capture(),
917                                 frameNumbers.capture());
918 
919         // Validate timestamps: all timestamps should be larger than 0 and monotonically increase.
920         long timestamp = 0;
921         for (Long nextTimestamp : timestamps.getAllValues()) {
922             assertNotNull("Next timestamp is null!", nextTimestamp);
923             assertTrue("Captures are out of order", timestamp < nextTimestamp);
924             timestamp = nextTimestamp;
925         }
926 
927         // Validate framenumbers: all framenumbers should be consecutive and positive
928         long frameNumber = -1;
929         for (Long nextFrameNumber : frameNumbers.getAllValues()) {
930             assertNotNull("Next frame number is null!", nextFrameNumber);
931             assertTrue("Captures are out of order",
932                     (frameNumber == -1) || (frameNumber + 1 == nextFrameNumber));
933             frameNumber = nextFrameNumber;
934         }
935 
936         // Should receive expected number of capture results.
937         verify(mockListener,
938                 timeout(timeOutMs).atLeast(expectResultCount))
939                         .onCaptureCompleted(
940                                 eq(session),
941                                 isA(CaptureRequest.class),
942                                 argThat(new IsCaptureResultValid()));
943 
944         // Should not receive any capture failed callbacks.
945         verify(mockListener, never())
946                         .onCaptureFailed(
947                                 eq(session),
948                                 isA(CaptureRequest.class),
949                                 isA(CaptureFailure.class));
950     }
951 
952 }
953