• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.cts;
18 
19 import static android.hardware.camera2.cts.CameraTestUtils.REPORT_LOG_NAME;
20 
21 import static org.junit.Assert.assertNotNull;
22 import static org.junit.Assert.assertTrue;
23 
24 import android.app.Instrumentation;
25 import android.content.Context;
26 import android.graphics.ImageFormat;
27 import android.graphics.SurfaceTexture;
28 import android.hardware.HardwareBuffer;
29 import android.hardware.camera2.CameraAccessException;
30 import android.hardware.camera2.CameraCaptureSession;
31 import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
32 import android.hardware.camera2.CameraCharacteristics;
33 import android.hardware.camera2.CameraDevice;
34 import android.hardware.camera2.CameraMetadata;
35 import android.hardware.camera2.CaptureRequest;
36 import android.hardware.camera2.CaptureResult;
37 import android.hardware.camera2.TotalCaptureResult;
38 import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
39 import android.hardware.camera2.cts.CameraTestUtils.SimpleImageReaderListener;
40 import android.hardware.camera2.cts.helpers.StaticMetadata;
41 import android.hardware.camera2.cts.helpers.StaticMetadata.CheckLevel;
42 import android.hardware.camera2.cts.testcases.Camera2AndroidTestRule;
43 import android.hardware.camera2.params.InputConfiguration;
44 import android.hardware.camera2.params.OutputConfiguration;
45 import android.hardware.camera2.params.StreamConfigurationMap;
46 import android.media.Image;
47 import android.media.ImageReader;
48 import android.media.ImageWriter;
49 import android.os.Build;
50 import android.os.Bundle;
51 import android.os.ConditionVariable;
52 import android.os.SystemClock;
53 import android.util.Log;
54 import android.util.Pair;
55 import android.util.Range;
56 import android.util.Size;
57 import android.view.Surface;
58 
59 import androidx.test.InstrumentationRegistry;
60 import androidx.test.rule.ActivityTestRule;
61 
62 import com.android.compatibility.common.util.DeviceReportLog;
63 import com.android.compatibility.common.util.PropertyUtil;
64 import com.android.compatibility.common.util.ResultType;
65 import com.android.compatibility.common.util.ResultUnit;
66 import com.android.compatibility.common.util.Stat;
67 import com.android.ex.camera2.blocking.BlockingSessionCallback;
68 import com.android.ex.camera2.exceptions.TimeoutRuntimeException;
69 import com.android.internal.camera.flags.Flags;
70 
71 import org.junit.Rule;
72 import org.junit.Test;
73 import org.junit.runner.RunWith;
74 import org.junit.runners.JUnit4;
75 
76 import java.util.ArrayList;
77 import java.util.Arrays;
78 import java.util.Collections;
79 import java.util.List;
80 import java.util.ListIterator;
81 import java.util.Objects;
82 import java.util.concurrent.LinkedBlockingQueue;
83 import java.util.concurrent.TimeUnit;
84 
85 /**
86  * Test camera2 API use case performance KPIs, such as camera open time, session creation time,
87  * shutter lag etc. The KPI data will be reported in cts results.
88  */
89 @RunWith(JUnit4.class)
90 public class PerformanceTest {
    private static final String TAG = "PerformanceTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    // Number of iterations each KPI measurement loop runs before averaging.
    private static final int NUM_TEST_LOOPS = 10;
    // Max images an ImageReader may hold concurrently during still-capture tests.
    private static final int NUM_MAX_IMAGES = 4;
    // Number of capture results to wait for when letting preview run "for a while".
    private static final int NUM_RESULTS_WAIT = 30;
    private static final int[] REPROCESS_FORMATS = {ImageFormat.YUV_420_888, ImageFormat.PRIVATE};
    // Max simultaneous in-flight reprocess capture requests.
    private final int MAX_REPROCESS_IMAGES = 6;
    private final int MAX_JPEG_IMAGES = MAX_REPROCESS_IMAGES;
    private final int MAX_INPUT_IMAGES = MAX_REPROCESS_IMAGES;
    // ZSL queue depth should be bigger than the max simultaneous reprocessing capture request
    // count to maintain reasonable number of candidate image for the worse-case.
    private final int MAX_ZSL_IMAGES = MAX_REPROCESS_IMAGES * 3 / 2;
    // Allowed fractional slack when validating reprocess stall durations.
    private final double REPROCESS_STALL_MARGIN = 0.1;
    // Per-result wait timeout, in milliseconds.
    private static final int WAIT_FOR_RESULT_TIMEOUT_MS = 3000;
    private static final int NUM_RESULTS_WAIT_TIMEOUT = 100;
    // Frames to skip before timing, for devices with unknown pipeline latency.
    private static final int NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY = 8;
    // Nominal frame duration at 30fps (1e9 / 30 ns).
    private static final long FRAME_DURATION_NS_30FPS = 33333333L;
    private static final int NUM_ZOOM_STEPS = 10;
    // Instrumentation-argument key signalling the caller already shows an activity.
    private static final String HAS_ACTIVITY_ARG_KEY = "has-activity";

    // Report log for the current metric stream; re-created per test/camera as needed.
    private DeviceReportLog mReportLog;

    // Used for reading camera output buffers.
    private ImageReader mCameraZslReader;
    private SimpleImageReaderListener mCameraZslImageListener;
    // Used for reprocessing (jpeg) output.
    private ImageReader mJpegReader;
    private SimpleImageReaderListener mJpegListener;
    // Used for reprocessing input.
    private ImageWriter mWriter;
    private SimpleCaptureCallback mZslResultListener;

    // Preview configuration shared by the individual tests.
    private Size mPreviewSize;
    private Surface mPreviewSurface;
    private SurfaceTexture mPreviewSurfaceTexture;
    private int mImageReaderFormat;

    private static final Instrumentation mInstrumentation =
            InstrumentationRegistry.getInstrumentation();
    private static final Context mContext = InstrumentationRegistry.getTargetContext();

    @Rule
    public final Camera2AndroidTestRule mTestRule = new Camera2AndroidTestRule(mContext);

    // b/284352937: Display an activity with SurfaceView so that camera's effect on refresh
    // rate takes precedence.
    //
    // - If no activity is displayed, home screen would vote for a completely different refresh
    // rate. Some examples are 24hz and 144hz. These doesn't reflect the actual refresh rate
    // when camera runs with a SurfaceView.
    // - The testSurfaceViewJitterReduction needs to read timestamps for each output image. If
    // we directly connect camera to SurfaceView, we won't have access to timestamps.
    //
    // So the solution is that if no activity already exists, create an activity with SurfaceView,
    // but not connect it to camera.
    @Rule
    public final ActivityTestRule<Camera2SurfaceViewCtsActivity> mActivityRule =
            createActivityRuleIfNeeded();
149 
createActivityRuleIfNeeded()150     private static ActivityTestRule<Camera2SurfaceViewCtsActivity> createActivityRuleIfNeeded() {
151         Bundle bundle = InstrumentationRegistry.getArguments();
152         byte hasActivity = bundle.getByte(HAS_ACTIVITY_ARG_KEY);
153 
154         // If the caller already has an activity, do not create the ActivityTestRule.
155         if (hasActivity != 0) {
156             return null;
157         } else {
158             return new ActivityTestRule<>(Camera2SurfaceViewCtsActivity.class);
159         }
160     }
161 
162     /**
163      * Test camera launch KPI: the time duration between a camera device is
164      * being opened and first preview frame is available.
165      * <p>
166      * It includes camera open time, session creation time, and sending first
167      * preview request processing latency etc. For the SurfaceView based preview use
168      * case, there is no way for client to know the exact preview frame
169      * arrival time. To approximate this time, a companion YUV420_888 stream is
170      * created. The first YUV420_888 Image coming out of the ImageReader is treated
171      * as the first preview arrival time.</p>
172      * <p>
173      * For depth-only devices, timing is done with the DEPTH16 format instead.
174      * </p>
175      */
    @Test
    public void testCameraLaunch() throws Exception {
        // Per-camera average launch latency, indexed in camera-id iteration order;
        // averaged again across cameras for the summary stream at the end.
        double[] avgCameraLaunchTimes = new double[mTestRule.getCameraIdsUnderTest().length];

        int counter = 0;
        for (String id : mTestRule.getCameraIdsUnderTest()) {
            // Do NOT move these variables to outer scope
            // They will be passed to DeviceReportLog and their references will be stored
            String streamName = "test_camera_launch";
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
            // One sample per loop iteration for each KPI.
            double[] cameraOpenTimes = new double[NUM_TEST_LOOPS];
            double[] configureStreamTimes = new double[NUM_TEST_LOOPS];
            double[] startPreviewTimes = new double[NUM_TEST_LOOPS];
            double[] stopPreviewTimes = new double[NUM_TEST_LOOPS];
            double[] cameraCloseTimes = new double[NUM_TEST_LOOPS];
            double[] cameraLaunchTimes = new double[NUM_TEST_LOOPS];
            try {
                CameraCharacteristics ch =
                        mTestRule.getCameraManager().getCameraCharacteristics(id);
                mTestRule.setStaticInfo(new StaticMetadata(ch));
                boolean isColorOutputSupported = mTestRule.getStaticInfo().isColorOutputSupported();
                // Color devices time with a YUV stream; depth-only devices use DEPTH16.
                if (isColorOutputSupported) {
                    initializeImageReader(id, ImageFormat.YUV_420_888);
                } else {
                    assertTrue("Depth output must be supported if regular output isn't!",
                            mTestRule.getStaticInfo().isDepthOutputSupported());
                    initializeImageReader(id, ImageFormat.DEPTH16);
                }
                updatePreviewSurface(mPreviewSize);

                SimpleImageListener imageListener = null;
                long startTimeMs, openTimeMs, configureTimeMs, previewStartedTimeMs;
                for (int i = 0; i < NUM_TEST_LOOPS; i++) {
                    try {
                        // Need create a new listener every iteration to be able to wait
                        // for the first image comes out.
                        imageListener = new SimpleImageListener();
                        mTestRule.getReader().setOnImageAvailableListener(
                                imageListener, mTestRule.getHandler());
                        startTimeMs = SystemClock.elapsedRealtime();

                        // Blocking open camera
                        simpleOpenCamera(id);
                        openTimeMs = SystemClock.elapsedRealtime();
                        cameraOpenTimes[i] = openTimeMs - startTimeMs;

                        // Blocking configure outputs.
                        CaptureRequest previewRequest =
                                configureReaderAndPreviewOutputs(id, isColorOutputSupported);
                        configureTimeMs = SystemClock.elapsedRealtime();
                        configureStreamTimes[i] = configureTimeMs - openTimeMs;

                        // Blocking start preview (start preview to first image arrives)
                        SimpleCaptureCallback resultListener =
                                new SimpleCaptureCallback();
                        blockingStartPreview(id, resultListener, previewRequest, imageListener);
                        previewStartedTimeMs = SystemClock.elapsedRealtime();
                        startPreviewTimes[i] = previewStartedTimeMs - configureTimeMs;
                        // Launch time is the full open -> first-frame span.
                        cameraLaunchTimes[i] = previewStartedTimeMs - startTimeMs;

                        // Let preview on for a couple of frames
                        CameraTestUtils.waitForNumResults(resultListener, NUM_RESULTS_WAIT,
                                WAIT_FOR_RESULT_TIMEOUT_MS);

                        // Blocking stop preview
                        startTimeMs = SystemClock.elapsedRealtime();
                        blockingStopRepeating();
                        stopPreviewTimes[i] = SystemClock.elapsedRealtime() - startTimeMs;
                    }
                    finally {
                        // Blocking camera close
                        startTimeMs = SystemClock.elapsedRealtime();
                        mTestRule.closeDevice(id);
                        cameraCloseTimes[i] = SystemClock.elapsedRealtime() - startTimeMs;
                    }
                }

                avgCameraLaunchTimes[counter] = Stat.getAverage(cameraLaunchTimes);
                // Finish the data collection, report the KPIs.
                // ReportLog keys have to be lowercase underscored format.
                mReportLog.addValues("camera_open_time", cameraOpenTimes, ResultType.LOWER_BETTER,
                        ResultUnit.MS);
                mReportLog.addValues("camera_configure_stream_time", configureStreamTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                mReportLog.addValues("camera_start_preview_time", startPreviewTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                mReportLog.addValues("camera_camera_stop_preview", stopPreviewTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                mReportLog.addValues("camera_camera_close_time", cameraCloseTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                mReportLog.addValues("camera_launch_time", cameraLaunchTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
            }
            finally {
                // Release the companion reader and preview surface even if a loop iteration threw.
                mTestRule.closeDefaultImageReader();
                closePreviewSurface();
            }
            counter++;
            mReportLog.submit(mInstrumentation);

            if (VERBOSE) {
                Log.v(TAG, "Camera " + id + " device open times(ms): "
                        + Arrays.toString(cameraOpenTimes)
                        + ". Average(ms): " + Stat.getAverage(cameraOpenTimes)
                        + ". Min(ms): " + Stat.getMin(cameraOpenTimes)
                        + ". Max(ms): " + Stat.getMax(cameraOpenTimes));
                Log.v(TAG, "Camera " + id + " configure stream times(ms): "
                        + Arrays.toString(configureStreamTimes)
                        + ". Average(ms): " + Stat.getAverage(configureStreamTimes)
                        + ". Min(ms): " + Stat.getMin(configureStreamTimes)
                        + ". Max(ms): " + Stat.getMax(configureStreamTimes));
                Log.v(TAG, "Camera " + id + " start preview times(ms): "
                        + Arrays.toString(startPreviewTimes)
                        + ". Average(ms): " + Stat.getAverage(startPreviewTimes)
                        + ". Min(ms): " + Stat.getMin(startPreviewTimes)
                        + ". Max(ms): " + Stat.getMax(startPreviewTimes));
                Log.v(TAG, "Camera " + id + " stop preview times(ms): "
                        + Arrays.toString(stopPreviewTimes)
                        + ". Average(ms): " + Stat.getAverage(stopPreviewTimes)
                        + ". nMin(ms): " + Stat.getMin(stopPreviewTimes)
                        + ". nMax(ms): " + Stat.getMax(stopPreviewTimes));
                Log.v(TAG, "Camera " + id + " device close times(ms): "
                        + Arrays.toString(cameraCloseTimes)
                        + ". Average(ms): " + Stat.getAverage(cameraCloseTimes)
                        + ". Min(ms): " + Stat.getMin(cameraCloseTimes)
                        + ". Max(ms): " + Stat.getMax(cameraCloseTimes));
                Log.v(TAG, "Camera " + id + " camera launch times(ms): "
                        + Arrays.toString(cameraLaunchTimes)
                        + ". Average(ms): " + Stat.getAverage(cameraLaunchTimes)
                        + ". Min(ms): " + Stat.getMin(cameraLaunchTimes)
                        + ". Max(ms): " + Stat.getMax(cameraLaunchTimes));
            }
        }
        // CTS drops the stream unless a summary is reported; emit the cross-camera average.
        if (mTestRule.getCameraIdsUnderTest().length != 0) {
            String streamName = "test_camera_launch_average";
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.setSummary("camera_launch_average_time_for_all_cameras",
                    Stat.getAverage(avgCameraLaunchTimes), ResultType.LOWER_BETTER, ResultUnit.MS);
            mReportLog.submit(mInstrumentation);
        }
    }
318 
319     /**
320      * Test camera capture KPI for YUV_420_888, PRIVATE, JPEG, RAW and RAW+JPEG
321      * formats: the time duration between sending out a single image capture request
322      * and receiving image data and capture result.
323      * <p>
324      * It enumerates the following metrics: capture latency, computed by
325      * measuring the time between sending out the capture request and getting
326      * the image data; partial result latency, computed by measuring the time
327      * between sending out the capture request and getting the partial result;
328      * capture result latency, computed by measuring the time between sending
329      * out the capture request and getting the full capture result.
330      * </p>
331      */
332     @Test
testSingleCapture()333     public void testSingleCapture() throws Exception {
334         int[] JPEG_FORMAT = {ImageFormat.JPEG};
335         testSingleCaptureForFormat(JPEG_FORMAT, "jpeg", /*addPreviewDelay*/ true);
336         if (!mTestRule.isPerfMeasure()) {
337             int[] JPEG_R_FORMAT = {ImageFormat.JPEG_R};
338             testSingleCaptureForFormat(JPEG_R_FORMAT, "jpeg_r", /*addPreviewDelay*/ true,
339                     /*enablePostview*/ false);
340             if (Flags.cameraHeifGainmap()) {
341                 int[] HEIC_ULTRAHDR_FORMAT = {ImageFormat.HEIC_ULTRAHDR};
342                 testSingleCaptureForFormat(HEIC_ULTRAHDR_FORMAT, "heic_ultrahdr",
343                         /*addPreviewDelay*/ true, /*enablePostview*/ false);
344             }
345             int[] YUV_FORMAT = {ImageFormat.YUV_420_888};
346             testSingleCaptureForFormat(YUV_FORMAT, null, /*addPreviewDelay*/ true);
347             int[] PRIVATE_FORMAT = {ImageFormat.PRIVATE};
348             testSingleCaptureForFormat(PRIVATE_FORMAT, "private", /*addPreviewDelay*/ true);
349             int[] RAW_FORMAT = {ImageFormat.RAW_SENSOR};
350             testSingleCaptureForFormat(RAW_FORMAT, "raw", /*addPreviewDelay*/ true);
351             int[] RAW_JPEG_FORMATS = {ImageFormat.RAW_SENSOR, ImageFormat.JPEG};
352             testSingleCaptureForFormat(RAW_JPEG_FORMATS, "raw_jpeg", /*addPreviewDelay*/ true);
353         }
354     }
355 
appendFormatDescription(String message, String formatDescription)356     private String appendFormatDescription(String message, String formatDescription) {
357         if (message == null) {
358             return null;
359         }
360 
361         String ret = message;
362         if (formatDescription != null) {
363             ret = String.format(ret + "_%s", formatDescription);
364         }
365 
366         return ret;
367     }
368 
testSingleCaptureForFormat(int[] formats, String formatDescription, boolean addPreviewDelay)369     private void testSingleCaptureForFormat(int[] formats, String formatDescription,
370             boolean addPreviewDelay) throws Exception {
371        testSingleCaptureForFormat(formats, formatDescription, addPreviewDelay,
372                /*enablePostview*/ true);
373     }
374 
    /**
     * Measures single still-capture KPIs for the given output format combination: buffer
     * latency (request sent -> image data received), partial-result latency, and full
     * capture-result latency, averaged over {@code NUM_TEST_LOOPS} iterations per camera.
     *
     * @param formats output formats to configure simultaneously for the still capture.
     * @param formatDescription suffix for report-log keys, or {@code null} for none.
     * @param addPreviewDelay whether to let preview settle ~500ms before capturing.
     * @param enablePostview whether to configure a postview output.
     */
    private void testSingleCaptureForFormat(int[] formats, String formatDescription,
            boolean addPreviewDelay, boolean enablePostview) throws Exception {
        // Per-camera averages for the cross-camera summary at the end.
        double[] avgResultTimes = new double[mTestRule.getCameraIdsUnderTest().length];
        double[] avgCaptureTimes = new double[mTestRule.getCameraIdsUnderTest().length];

        int counter = 0;
        for (String id : mTestRule.getCameraIdsUnderTest()) {
            // Do NOT move these variables to outer scope
            // They will be passed to DeviceReportLog and their references will be stored
            String streamName = appendFormatDescription("test_single_capture", formatDescription);
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
            double[] captureTimes = new double[NUM_TEST_LOOPS];
            double[] getPartialTimes = new double[NUM_TEST_LOOPS];
            double[] getResultTimes = new double[NUM_TEST_LOOPS];
            ImageReader[] readers = null;
            try {
                // Skip cameras that can't produce the requested outputs.
                if (!mTestRule.getAllStaticInfo().get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }

                StreamConfigurationMap configMap = mTestRule.getAllStaticInfo().get(
                        id).getCharacteristics().get(
                        CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                boolean formatsSupported = true;
                for (int format : formats) {
                    if (!configMap.isOutputSupportedFor(format)) {
                        Log.i(TAG, "Camera " + id + " does not support output format: " + format +
                                " skipping");
                        formatsSupported = false;
                        break;
                    }
                }
                if (!formatsSupported) {
                    continue;
                }

                mTestRule.openDevice(id);

                boolean partialsExpected = mTestRule.getStaticInfo().getPartialResultCount() > 1;
                long startTimeMs;
                boolean isPartialTimingValid = partialsExpected;
                for (int i = 0; i < NUM_TEST_LOOPS; i++) {

                    // setup builders and listeners
                    CaptureRequest.Builder previewBuilder =
                            mTestRule.getCamera().createCaptureRequest(
                                    CameraDevice.TEMPLATE_PREVIEW);
                    CaptureRequest.Builder captureBuilder =
                            mTestRule.getCamera().createCaptureRequest(
                                    CameraDevice.TEMPLATE_STILL_CAPTURE);
                    SimpleCaptureCallback previewResultListener =
                            new SimpleCaptureCallback();
                    SimpleTimingResultListener captureResultListener =
                            new SimpleTimingResultListener();
                    // One listener/size per requested still-capture format.
                    SimpleImageListener[] imageListeners = new SimpleImageListener[formats.length];
                    Size[] imageSizes = new Size[formats.length];
                    for (int j = 0; j < formats.length; j++) {
                        // Performance-class runs cap the still size at 1080p.
                        Size sizeBound = mTestRule.isPerfClassTest() ? new Size(1920, 1080) : null;
                        imageSizes[j] = CameraTestUtils.getSortedSizesForFormat(
                                id,
                                mTestRule.getCameraManager(),
                                formats[j],
                                sizeBound).get(0);
                        imageListeners[j] = new SimpleImageListener();
                    }

                    readers = prepareStillCaptureAndStartPreview(id, previewBuilder, captureBuilder,
                            mTestRule.getOrderedPreviewSizes().get(0), imageSizes, formats,
                            previewResultListener, NUM_MAX_IMAGES, imageListeners, enablePostview);

                    if (addPreviewDelay) {
                        Thread.sleep(500);
                    }

                    // Capture an image and get image data
                    startTimeMs = SystemClock.elapsedRealtime();
                    CaptureRequest request = captureBuilder.build();
                    mTestRule.getCameraSession().capture(
                            request, captureResultListener, mTestRule.getHandler());

                    Pair<CaptureResult, Long> partialResultNTime = null;
                    if (partialsExpected) {
                        partialResultNTime = captureResultListener.getPartialResultNTimeForRequest(
                                request, NUM_RESULTS_WAIT);
                        // Even if maxPartials > 1, may not see partials for some devices
                        if (partialResultNTime == null) {
                            partialsExpected = false;
                            isPartialTimingValid = false;
                        }
                    }
                    Pair<CaptureResult, Long> captureResultNTime =
                            captureResultListener.getCaptureResultNTimeForRequest(
                                    request, NUM_RESULTS_WAIT);

                    // Buffer latency is averaged over all configured formats' arrival times.
                    double [] imageTimes = new double[formats.length];
                    for (int j = 0; j < formats.length; j++) {
                        imageListeners[j].waitForImageAvailable(
                                CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                        imageTimes[j] = imageListeners[j].getTimeReceivedImage();
                    }

                    captureTimes[i] = Stat.getAverage(imageTimes) - startTimeMs;
                    if (partialsExpected) {
                        getPartialTimes[i] = partialResultNTime.second - startTimeMs;
                        if (getPartialTimes[i] < 0) {
                            isPartialTimingValid = false;
                        }
                    }
                    getResultTimes[i] = captureResultNTime.second - startTimeMs;

                    // simulate real scenario (preview runs a bit)
                    CameraTestUtils.waitForNumResults(previewResultListener, NUM_RESULTS_WAIT,
                            WAIT_FOR_RESULT_TIMEOUT_MS);

                    blockingStopRepeating();

                    CameraTestUtils.closeImageReaders(readers);
                    readers = null;
                }
                String message = appendFormatDescription("camera_capture_latency",
                        formatDescription);
                mReportLog.addValues(message, captureTimes, ResultType.LOWER_BETTER, ResultUnit.MS);
                // If any of the partial results do not contain AE and AF state, then no report
                if (isPartialTimingValid) {
                    message = appendFormatDescription("camera_partial_result_latency",
                            formatDescription);
                    mReportLog.addValues(message, getPartialTimes, ResultType.LOWER_BETTER,
                            ResultUnit.MS);
                }
                message = appendFormatDescription("camera_capture_result_latency",
                        formatDescription);
                mReportLog.addValues(message, getResultTimes, ResultType.LOWER_BETTER,
                        ResultUnit.MS);

                avgResultTimes[counter] = Stat.getAverage(getResultTimes);
                avgCaptureTimes[counter] = Stat.getAverage(captureTimes);
            }
            finally {
                // Release readers/device even if an iteration threw mid-loop.
                CameraTestUtils.closeImageReaders(readers);
                readers = null;
                mTestRule.closeDevice(id);
                closePreviewSurface();
            }
            counter++;
            mReportLog.submit(mInstrumentation);
        }

        // Result will not be reported in CTS report if no summary is printed.
        if (mTestRule.getCameraIdsUnderTest().length != 0) {
            String streamName = appendFormatDescription("test_single_capture_average",
                    formatDescription);
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            // In performance measurement mode, capture the buffer latency rather than result
            // latency.
            if (mTestRule.isPerfMeasure()) {
                String message = appendFormatDescription(
                        "camera_capture_average_latency_for_all_cameras", formatDescription);
                mReportLog.setSummary(message, Stat.getAverage(avgCaptureTimes),
                        ResultType.LOWER_BETTER, ResultUnit.MS);
            } else {
                String message = appendFormatDescription(
                        "camera_capture_result_average_latency_for_all_cameras", formatDescription);
                mReportLog.setSummary(message, Stat.getAverage(avgResultTimes),
                        ResultType.LOWER_BETTER, ResultUnit.MS);
            }
            mReportLog.submit(mInstrumentation);
        }
    }
545 
546     /**
547      * Test multiple capture KPI for YUV_420_888 format: the average time duration
548      * between sending out image capture requests and receiving capture results.
549      * <p>
550      * It measures capture latency, which is the time between sending out the capture
551      * request and getting the full capture result, and the frame duration, which is the timestamp
552      * gap between results.
553      * </p>
554      */
555     @Test
testMultipleCapture()556     public void testMultipleCapture() throws Exception {
557         double[] avgResultTimes = new double[mTestRule.getCameraIdsUnderTest().length];
558         double[] avgDurationMs = new double[mTestRule.getCameraIdsUnderTest().length];
559 
560         // A simple CaptureSession StateCallback to handle onCaptureQueueEmpty
561         class MultipleCaptureStateCallback extends CameraCaptureSession.StateCallback {
562             private ConditionVariable captureQueueEmptyCond = new ConditionVariable();
563             private int captureQueueEmptied = 0;
564 
565             @Override
566             public void onConfigured(CameraCaptureSession session) {
567                 // Empty implementation
568             }
569 
570             @Override
571             public void onConfigureFailed(CameraCaptureSession session) {
572                 // Empty implementation
573             }
574 
575             @Override
576             public void onCaptureQueueEmpty(CameraCaptureSession session) {
577                 captureQueueEmptied++;
578                 if (VERBOSE) {
579                     Log.v(TAG, "onCaptureQueueEmpty received. captureQueueEmptied = "
580                             + captureQueueEmptied);
581                 }
582 
583                 captureQueueEmptyCond.open();
584             }
585 
586             /* Wait for onCaptureQueueEmpty, return immediately if an onCaptureQueueEmpty was
587              * already received, otherwise, wait for one to arrive. */
588             public void waitForCaptureQueueEmpty(long timeout) {
589                 if (captureQueueEmptied > 0) {
590                     captureQueueEmptied--;
591                     return;
592                 }
593 
594                 if (captureQueueEmptyCond.block(timeout)) {
595                     captureQueueEmptyCond.close();
596                     captureQueueEmptied = 0;
597                 } else {
598                     throw new TimeoutRuntimeException("Unable to receive onCaptureQueueEmpty after "
599                             + timeout + "ms");
600                 }
601             }
602         }
603 
604         final MultipleCaptureStateCallback sessionListener = new MultipleCaptureStateCallback();
605 
606         int counter = 0;
607         for (String id : mTestRule.getCameraIdsUnderTest()) {
608             // Do NOT move these variables to outer scope
609             // They will be passed to DeviceReportLog and their references will be stored
610             String streamName = "test_multiple_capture";
611             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
612             mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
613             long[] startTimes = new long[NUM_MAX_IMAGES];
614             double[] getResultTimes = new double[NUM_MAX_IMAGES];
615             double[] frameDurationMs = new double[NUM_MAX_IMAGES-1];
616             try {
617                 StaticMetadata staticMetadata = mTestRule.getAllStaticInfo().get(id);
618                 if (!staticMetadata.isColorOutputSupported()) {
619                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
620                     continue;
621                 }
622                 boolean useSessionKeys = isFpsRangeASessionKey(staticMetadata.getCharacteristics());
623 
624                 mTestRule.openDevice(id);
625                 for (int i = 0; i < NUM_TEST_LOOPS; i++) {
626 
627                     // setup builders and listeners
628                     CaptureRequest.Builder previewBuilder =
629                             mTestRule.getCamera().createCaptureRequest(
630                                     CameraDevice.TEMPLATE_PREVIEW);
631                     CaptureRequest.Builder captureBuilder =
632                             mTestRule.getCamera().createCaptureRequest(
633                                     CameraDevice.TEMPLATE_STILL_CAPTURE);
634                     SimpleCaptureCallback previewResultListener =
635                             new SimpleCaptureCallback();
636                     SimpleTimingResultListener captureResultListener =
637                             new SimpleTimingResultListener();
638                     SimpleImageReaderListener imageListener =
639                             new SimpleImageReaderListener(/*asyncMode*/true, NUM_MAX_IMAGES);
640 
641                     Size maxYuvSize = CameraTestUtils.getSortedSizesForFormat(
642                             id, mTestRule.getCameraManager(),
643                             ImageFormat.YUV_420_888, /*bound*/null).get(0);
644                     // Find minimum frame duration for YUV_420_888
645                     StreamConfigurationMap config =
646                             mTestRule.getStaticInfo().getCharacteristics().get(
647                             CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
648 
649                     final long minStillFrameDuration =
650                             config.getOutputMinFrameDuration(ImageFormat.YUV_420_888, maxYuvSize);
651                     if (minStillFrameDuration > 0) {
652                         Range<Integer> targetRange =
653                                 CameraTestUtils.getSuitableFpsRangeForDuration(id,
654                                         minStillFrameDuration, mTestRule.getStaticInfo());
655                         previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
656                         captureBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
657                     }
658 
659                     prepareCaptureAndStartPreview(previewBuilder, captureBuilder,
660                             mTestRule.getOrderedPreviewSizes().get(0), maxYuvSize,
661                             ImageFormat.YUV_420_888, previewResultListener,
662                             sessionListener, NUM_MAX_IMAGES, imageListener,
663                             useSessionKeys);
664 
665                     // Converge AE
666                     CameraTestUtils.waitForAeStable(previewResultListener,
667                             NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY, mTestRule.getStaticInfo(),
668                             WAIT_FOR_RESULT_TIMEOUT_MS, NUM_RESULTS_WAIT_TIMEOUT);
669 
670                     if (mTestRule.getStaticInfo().isAeLockSupported()) {
671                         // Lock AE if possible to improve stability
672                         previewBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
673                         mTestRule.getCameraSession().setRepeatingRequest(previewBuilder.build(),
674                                 previewResultListener, mTestRule.getHandler());
675                         if (mTestRule.getStaticInfo().isHardwareLevelAtLeastLimited()) {
676                             // Legacy mode doesn't output AE state
677                             CameraTestUtils.waitForResultValue(previewResultListener,
678                                     CaptureResult.CONTROL_AE_STATE,
679                                     CaptureResult.CONTROL_AE_STATE_LOCKED,
680                                     NUM_RESULTS_WAIT_TIMEOUT, WAIT_FOR_RESULT_TIMEOUT_MS);
681                         }
682                     }
683 
684                     // Capture NUM_MAX_IMAGES images based on onCaptureQueueEmpty callback
685                     for (int j = 0; j < NUM_MAX_IMAGES; j++) {
686 
687                         // Capture an image and get image data
688                         startTimes[j] = SystemClock.elapsedRealtime();
689                         CaptureRequest request = captureBuilder.build();
690                         mTestRule.getCameraSession().capture(
691                                 request, captureResultListener, mTestRule.getHandler());
692 
693                         // Wait for capture queue empty for the current request
694                         sessionListener.waitForCaptureQueueEmpty(
695                                 CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
696                     }
697 
698                     // Acquire the capture result time and frame duration
699                     long prevTimestamp = -1;
700                     for (int j = 0; j < NUM_MAX_IMAGES; j++) {
701                         Pair<CaptureResult, Long> captureResultNTime =
702                                 captureResultListener.getCaptureResultNTime(
703                                         CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
704 
705                         getResultTimes[j] +=
706                                 (double)(captureResultNTime.second - startTimes[j])/NUM_TEST_LOOPS;
707 
708                         // Collect inter-frame timestamp
709                         long timestamp = captureResultNTime.first.get(
710                                 CaptureResult.SENSOR_TIMESTAMP);
711                         if (prevTimestamp != -1) {
712                             frameDurationMs[j-1] +=
713                                     (double)(timestamp - prevTimestamp)/(
714                                             NUM_TEST_LOOPS * 1000000.0);
715                         }
716                         prevTimestamp = timestamp;
717                     }
718 
719                     // simulate real scenario (preview runs a bit)
720                     CameraTestUtils.waitForNumResults(previewResultListener, NUM_RESULTS_WAIT,
721                             WAIT_FOR_RESULT_TIMEOUT_MS);
722 
723                     stopRepeating();
724                 }
725 
726                 for (int i = 0; i < getResultTimes.length; i++) {
727                     Log.v(TAG, "Camera " + id + " result time[" + i + "] is " +
728                             getResultTimes[i] + " ms");
729                 }
730                 for (int i = 0; i < NUM_MAX_IMAGES-1; i++) {
731                     Log.v(TAG, "Camera " + id + " frame duration time[" + i + "] is " +
732                             frameDurationMs[i] + " ms");
733                 }
734 
735                 mReportLog.addValues("camera_multiple_capture_result_latency", getResultTimes,
736                         ResultType.LOWER_BETTER, ResultUnit.MS);
737                 mReportLog.addValues("camera_multiple_capture_frame_duration", frameDurationMs,
738                         ResultType.LOWER_BETTER, ResultUnit.MS);
739 
740 
741                 avgResultTimes[counter] = Stat.getAverage(getResultTimes);
742                 avgDurationMs[counter] = Stat.getAverage(frameDurationMs);
743             }
744             finally {
745                 mTestRule.closeDefaultImageReader();
746                 mTestRule.closeDevice(id);
747                 closePreviewSurface();
748             }
749             counter++;
750             mReportLog.submit(mInstrumentation);
751         }
752 
753         // Result will not be reported in CTS report if no summary is printed.
754         if (mTestRule.getCameraIdsUnderTest().length != 0) {
755             String streamName = "test_multiple_capture_average";
756             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
757             mReportLog.setSummary("camera_multiple_capture_result_average_latency_for_all_cameras",
758                     Stat.getAverage(avgResultTimes), ResultType.LOWER_BETTER, ResultUnit.MS);
759             mReportLog.submit(mInstrumentation);
760             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
761             mReportLog.setSummary("camera_multiple_capture_frame_duration_average_for_all_cameras",
762                     Stat.getAverage(avgDurationMs), ResultType.LOWER_BETTER, ResultUnit.MS);
763             mReportLog.submit(mInstrumentation);
764         }
765     }
766 
767     /**
768      * Test reprocessing shot-to-shot latency with default NR and edge options, i.e., from the time
769      * a reprocess request is issued to the time the reprocess image is returned.
770      */
771     @Test
testReprocessingLatency()772     public void testReprocessingLatency() throws Exception {
773         for (String id : mTestRule.getCameraIdsUnderTest()) {
774             for (int format : REPROCESS_FORMATS) {
775                 if (!isReprocessSupported(id, format)) {
776                     continue;
777                 }
778 
779                 try {
780                     mTestRule.openDevice(id);
781                     String streamName = "test_reprocessing_latency";
782                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
783                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
784                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
785                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/false,
786                             /*highQuality*/false);
787                 } finally {
788                     closeReaderWriters();
789                     mTestRule.closeDevice(id);
790                     closePreviewSurface();
791                     mReportLog.submit(mInstrumentation);
792                 }
793             }
794         }
795     }
796 
797     /**
798      * Test reprocessing throughput with default NR and edge options,
799      * i.e., how many frames can be reprocessed during a given amount of time.
800      *
801      */
802     @Test
testReprocessingThroughput()803     public void testReprocessingThroughput() throws Exception {
804         for (String id : mTestRule.getCameraIdsUnderTest()) {
805             for (int format : REPROCESS_FORMATS) {
806                 if (!isReprocessSupported(id, format)) {
807                     continue;
808                 }
809 
810                 try {
811                     mTestRule.openDevice(id);
812                     String streamName = "test_reprocessing_throughput";
813                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
814                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
815                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
816                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/true,
817                             /*highQuality*/false);
818                 } finally {
819                     closeReaderWriters();
820                     mTestRule.closeDevice(id);
821                     closePreviewSurface();
822                     mReportLog.submit(mInstrumentation);
823                 }
824             }
825         }
826     }
827 
828     /**
829      * Test reprocessing shot-to-shot latency with High Quality NR and edge options, i.e., from the
830      * time a reprocess request is issued to the time the reprocess image is returned.
831      */
832     @Test
testHighQualityReprocessingLatency()833     public void testHighQualityReprocessingLatency() throws Exception {
834         for (String id : mTestRule.getCameraIdsUnderTest()) {
835             for (int format : REPROCESS_FORMATS) {
836                 if (!isReprocessSupported(id, format)) {
837                     continue;
838                 }
839 
840                 try {
841                     mTestRule.openDevice(id);
842                     String streamName = "test_high_quality_reprocessing_latency";
843                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
844                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
845                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
846                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/false,
847                             /*requireHighQuality*/true);
848                 } finally {
849                     closeReaderWriters();
850                     mTestRule.closeDevice(id);
851                     closePreviewSurface();
852                     mReportLog.submit(mInstrumentation);
853                 }
854             }
855         }
856     }
857 
858     /**
859      * Test reprocessing throughput with high quality NR and edge options, i.e., how many frames can
860      * be reprocessed during a given amount of time.
861      *
862      */
863     @Test
testHighQualityReprocessingThroughput()864     public void testHighQualityReprocessingThroughput() throws Exception {
865         for (String id : mTestRule.getCameraIdsUnderTest()) {
866             for (int format : REPROCESS_FORMATS) {
867                 if (!isReprocessSupported(id, format)) {
868                     continue;
869                 }
870 
871                 try {
872                     mTestRule.openDevice(id);
873                     String streamName = "test_high_quality_reprocessing_throughput";
874                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
875                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
876                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
877                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/true,
878                             /*requireHighQuality*/true);
879                 } finally {
880                     closeReaderWriters();
881                     mTestRule.closeDevice(id);
882                     closePreviewSurface();
883                     mReportLog.submit(mInstrumentation);
884                 }
885             }
886         }
887     }
888 
889     /**
890      * Testing reprocessing caused preview stall (frame drops)
891      */
892     @Test
testReprocessingCaptureStall()893     public void testReprocessingCaptureStall() throws Exception {
894         for (String id : mTestRule.getCameraIdsUnderTest()) {
895             for (int format : REPROCESS_FORMATS) {
896                 if (!isReprocessSupported(id, format)) {
897                     continue;
898                 }
899 
900                 try {
901                     mTestRule.openDevice(id);
902                     String streamName = "test_reprocessing_capture_stall";
903                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
904                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
905                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
906                     reprocessingCaptureStallTestByCamera(format);
907                 } finally {
908                     closeReaderWriters();
909                     mTestRule.closeDevice(id);
910                     closePreviewSurface();
911                     mReportLog.submit(mInstrumentation);
912                 }
913             }
914         }
915     }
916 
    // Direction of zoom applied across the tested ratio list: ZOOM_IN walks the
    // ratios in ascending order, ZOOM_OUT walks them in descending order
    // (getZoomRatiosToTest reverses the list for ZOOM_OUT).
    private enum ZoomDirection {
        ZOOM_IN,
        ZOOM_OUT;
    }
922 
    // Portion of the camera's zoom ratio range to exercise: ratios >= 1.0x,
    // ratios <= 1.0x, or the camera's full supported range.
    private enum ZoomRange {
        RATIO_1_OR_LARGER,
        RATIO_1_OR_SMALLER,
        RATIO_FULL_RANGE;
    }
929 
930     /**
931      * Testing Zoom settings override performance for zoom in from 1.0x
932      *
933      * The range of zoomRatio being tested is [1.0x, maxZoomRatio]
934      * The test is skipped if minZoomRatio == 1.0x.
935      */
936     @Test
testZoomSettingsOverrideLatencyInFrom1x()937     public void testZoomSettingsOverrideLatencyInFrom1x() throws Exception {
938         testZoomSettingsOverrideLatency("zoom_in_from_1x",
939                 ZoomDirection.ZOOM_IN, ZoomRange.RATIO_1_OR_LARGER,
940                 /*checkSmoothZoom*/ false);
941     }
942 
943     /**
944      * Testing Zoom settings override performance for zoom out to 1.0x
945      *
946      * The range of zoomRatio being tested is [maxZoomRatio, 1.0x]
947      * The test is skipped if minZoomRatio == 1.0x.
948      */
949     @Test
testZoomSettingsOverrideLatencyOutTo1x()950     public void testZoomSettingsOverrideLatencyOutTo1x() throws Exception {
951         testZoomSettingsOverrideLatency("zoom_out_to_1x",
952                 ZoomDirection.ZOOM_OUT, ZoomRange.RATIO_1_OR_LARGER,
953                 /*checkSmoothZoom*/ false);
954     }
955 
956     /**
957      * Testing Zoom settings override performance for zoom out from 1.0x
958      *
959      * The range of zoomRatios being tested is [1.0x, minZoomRatio].
960      * The test is skipped if minZoomRatio == 1.0x.
961      */
962     @Test
testZoomSettingsOverrideLatencyOutFrom1x()963     public void testZoomSettingsOverrideLatencyOutFrom1x() throws Exception {
964         testZoomSettingsOverrideLatency("zoom_out_from_1x",
965                 ZoomDirection.ZOOM_OUT, ZoomRange.RATIO_1_OR_SMALLER,
966                 /*checkSmoothZoom*/ false);
967     }
968 
969     /**
970      * Testing Zoom settings override performance for zoom in full range
971      *
972      * The range of zoomRatios being tested is [minZoomRatio, maxZoomRatio].
973      */
974     @Test
testZoomSettingsOverrideLatencyInWithUltraWide()975     public void testZoomSettingsOverrideLatencyInWithUltraWide() throws Exception {
976         testZoomSettingsOverrideLatency("zoom_in_from_ultrawide",
977                 ZoomDirection.ZOOM_IN, ZoomRange.RATIO_FULL_RANGE,
978                 /*checkSmoothZoom*/ true);
979     }
980 
981     /**
982      * Testing Zoom settings override performance for zoom out full range
983      *
984      * The range of zoomRatios being tested is [maxZoomRatio, minZoomRatio].
985      */
986     @Test
testZoomSettingsOverrideLatencyOutWithUltraWide()987     public void testZoomSettingsOverrideLatencyOutWithUltraWide() throws Exception {
988         testZoomSettingsOverrideLatency("zoom_out_to_ultrawide",
989                 ZoomDirection.ZOOM_OUT, ZoomRange.RATIO_FULL_RANGE,
990                 /*checkSmoothZoom*/ true);
991     }
992 
993     /**
994      * Get zoom ratios to be tested for zoom settings override test
995      */
getZoomRatiosToTest(StaticMetadata staticMetadata, boolean checkSmoothZoomForV, ZoomDirection direction, ZoomRange range)996     private double[] getZoomRatiosToTest(StaticMetadata staticMetadata,
997             boolean checkSmoothZoomForV, ZoomDirection direction, ZoomRange range) {
998         Range<Float> zoomRatioRange = staticMetadata.getZoomRatioRangeChecked();
999         final float kSmoothZoomStep = 0.1f;
1000         final float kMaxZoomRatio = 10.0f;
1001         float startRatio = zoomRatioRange.getLower();
1002         float endRatio = Math.min(zoomRatioRange.getUpper(), kMaxZoomRatio);
1003 
1004         if (range == ZoomRange.RATIO_1_OR_LARGER) {
1005             startRatio = 1.0f;
1006         } else if (range == ZoomRange.RATIO_1_OR_SMALLER) {
1007             endRatio = 1.0f;
1008         }
1009 
1010         ArrayList<Double> zoomRatios = new ArrayList<>();
1011         if (!checkSmoothZoomForV) {
1012             // If not checking smooth zoom, equally divide zoom range into NUM_ZOOM_STEPS
1013             // equal pieces.
1014             for (int i = 0; i <= NUM_ZOOM_STEPS; i++) {
1015                 double ratio = startRatio + (endRatio - startRatio) * i / NUM_ZOOM_STEPS;
1016                 zoomRatios.add(adjustZoomRatio(ratio));
1017             }
1018         } else {
1019             // If checking smooth zoom:
1020             // 1. Divide zoom range logarithmically to align with user perception.
1021             // 2. Smaller steps to simulate pinch zoom better, and at the same time giving
1022             //    lens switch enough time.
1023             double stepLog = Math.log(1.0f + kSmoothZoomStep);
1024             // Add zoom-out ratios
1025             for (double logRatio = 0.0f; logRatio >= Math.log(startRatio);
1026                     logRatio -= stepLog) {
1027                 zoomRatios.add(adjustZoomRatio(Math.exp(logRatio)));
1028             }
1029             Collections.reverse(zoomRatios);
1030             // Add zoom-in ratios
1031             for (double logRatio = stepLog; logRatio <= Math.log(endRatio);
1032                     logRatio += stepLog) {
1033                 zoomRatios.add(adjustZoomRatio(Math.exp(logRatio)));
1034             }
1035         }
1036 
1037         if (direction == ZoomDirection.ZOOM_OUT) {
1038             Collections.reverse(zoomRatios);
1039         }
1040         return zoomRatios.stream().mapToDouble(d -> d).toArray();
1041     }
1042 
1043     /**
1044      * If needed, adjust the given zoom ratio so that it is not equal to 1.0
1045      *
1046      */
adjustZoomRatio(double zoomRatio)1047     private double adjustZoomRatio(double zoomRatio) {
1048         if (Flags.zoomMethod()) {
1049             return zoomRatio;
1050         } else {
1051             final double kZoomRatioAt1x = 1.01f;
1052             return zoomRatio == 1.0 ? kZoomRatioAt1x : zoomRatio;
1053         }
1054     }
1055 
1056     /**
1057      * This test measures the zoom latency improvement for devices supporting zoom settings
1058      * override.
1059      */
testZoomSettingsOverrideLatency(String testCase, ZoomDirection direction, ZoomRange range, boolean checkSmoothZoom)1060     private void testZoomSettingsOverrideLatency(String testCase,
1061             ZoomDirection direction, ZoomRange range, boolean checkSmoothZoom) throws Exception {
1062         final float ZOOM_ERROR_MARGIN = 0.05f;
1063         final float ERROR_THRESH_FACTOR = 0.33f;
1064         final int ZOOM_IN_MIN_IMPROVEMENT_IN_FRAMES = 1;
1065         final int MAX_IMPROVEMENT_VARIATION = 2;
1066         final boolean atLeastV =
1067                 PropertyUtil.getFirstApiLevel() > Build.VERSION_CODES.UPSIDE_DOWN_CAKE;
1068         final boolean checkSmoothZoomForV = checkSmoothZoom && atLeastV;
1069 
1070         for (String id : mTestRule.getCameraIdsUnderTest()) {
1071             StaticMetadata staticMetadata = mTestRule.getAllStaticInfo().get(id);
1072             CameraCharacteristics ch = staticMetadata.getCharacteristics();
1073 
1074             if (!staticMetadata.isColorOutputSupported()) {
1075                 continue;
1076             }
1077 
1078             if (!staticMetadata.isZoomSettingsOverrideSupported()) {
1079                 continue;
1080             }
1081 
1082             Range<Float> zoomRatioRange = staticMetadata.getZoomRatioRangeChecked();
1083             float minZoomRatio = zoomRatioRange.getLower();
1084             if (minZoomRatio >= 1.0f && (range != ZoomRange.RATIO_FULL_RANGE)) {
1085                 // Skip if the tests are overlapping with the full range tests.
1086                 continue;
1087             }
1088 
1089             // Figure out zoom ratios to test
1090             double[] ratiosToTest = getZoomRatiosToTest(staticMetadata,
1091                     checkSmoothZoomForV, direction, range);
1092             int numZoomSteps = ratiosToTest.length;
1093             int[] overrideImprovements = new int[numZoomSteps - 1];
1094 
1095             Log.v(TAG, "Camera " + id + " zoom settings: " + Arrays.toString(ratiosToTest));
1096             String streamName = "test_camera_zoom_override_latency";
1097             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
1098             mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
1099             mReportLog.addValue("zoom_test_case", testCase, ResultType.NEUTRAL, ResultUnit.NONE);
1100 
1101             try {
1102                 mTestRule.openDevice(id);
1103                 mPreviewSize = mTestRule.getOrderedPreviewSizes().get(0);
1104                 updatePreviewSurface(mPreviewSize);
1105 
1106                 // Start viewfinder with settings override set and the starting zoom ratio,
1107                 // and wait for some number of frames.
1108                 CaptureRequest.Builder previewBuilder = configurePreviewOutputs(id);
1109                 previewBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
1110                         CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
1111                 previewBuilder.set(CaptureRequest.CONTROL_SETTINGS_OVERRIDE,
1112                         CameraMetadata.CONTROL_SETTINGS_OVERRIDE_ZOOM);
1113                 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.BAKLAVA) {
1114                     previewBuilder.set(CaptureRequest.CONTROL_ZOOM_METHOD,
1115                             CameraMetadata.CONTROL_ZOOM_METHOD_ZOOM_RATIO);
1116                 }
1117                 float startZoomRatio = (float) ratiosToTest[0];
1118                 previewBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, startZoomRatio);
1119                 SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1120                 int sequenceId = mTestRule.getCameraSession().setRepeatingRequest(
1121                         previewBuilder.build(), resultListener, mTestRule.getHandler());
1122                 CaptureResult result = CameraTestUtils.waitForNumResults(
1123                         resultListener, NUM_RESULTS_WAIT, WAIT_FOR_RESULT_TIMEOUT_MS);
1124 
1125                 float previousRatio = startZoomRatio;
1126                 for (int j = 0; j < numZoomSteps - 1; j++) {
1127                     float zoomFactor = (float) ratiosToTest[j + 1];
1128                     // The error margin needs to be adjusted based on the zoom step size.
1129                     // We take the min of ZOOM_ERROR_MARGIN and 1/3 of zoom ratio step.
1130                     float zoomErrorMargin = Math.min(ZOOM_ERROR_MARGIN,
1131                             (float) Math.abs(zoomFactor - previousRatio) * ERROR_THRESH_FACTOR);
1132                     previewBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomFactor);
1133                     int newSequenceId = mTestRule.getCameraSession().setRepeatingRequest(
1134                             previewBuilder.build(), resultListener, mTestRule.getHandler());
1135                     long lastFrameNumberForRequest =
1136                             resultListener.getCaptureSequenceLastFrameNumber(sequenceId,
1137                                     WAIT_FOR_RESULT_TIMEOUT_MS);
1138 
1139                     int improvement = 0;
1140                     long frameNumber = -1;
1141                     Log.v(TAG, "LastFrameNumber for sequence " + sequenceId + ": "
1142                             + lastFrameNumberForRequest);
1143                     while (frameNumber < lastFrameNumberForRequest + 1) {
1144                         TotalCaptureResult zoomResult = resultListener.getTotalCaptureResult(
1145                                 WAIT_FOR_RESULT_TIMEOUT_MS);
1146                         frameNumber = zoomResult.getFrameNumber();
1147                         float resultZoomFactor = zoomResult.get(CaptureResult.CONTROL_ZOOM_RATIO);
1148 
1149                         assertTrue(String.format("Zoom ratio should monotonically increase/decrease"
1150                                 + " or stay the same (previous = %f, current = %f", previousRatio,
1151                                 resultZoomFactor),
1152                                 Math.abs(previousRatio - resultZoomFactor) < zoomErrorMargin
1153                                 || (direction == ZoomDirection.ZOOM_IN
1154                                         && previousRatio < resultZoomFactor)
1155                                 || (direction == ZoomDirection.ZOOM_OUT
1156                                         && previousRatio > resultZoomFactor));
1157 
1158                         if (Math.abs(resultZoomFactor - zoomFactor) < zoomErrorMargin
1159                                 && improvement == 0) {
1160                             improvement = (int) (lastFrameNumberForRequest + 1 - frameNumber);
1161                         }
1162                         Log.v(TAG, "frameNumber " + frameNumber + " zoom: " + resultZoomFactor
1163                                 + " improvement: " + improvement);
1164                         previousRatio = resultZoomFactor;
1165                     }
1166 
1167                     // For firstApiLevel < V, zoom in must have at least 1 frame latency
1168                     // improvement. For firstApiLevel >= V, both zoom in and out must have
1169                     // at least 1 frame latency improvement.
1170                     if ((checkSmoothZoomForV || (range == ZoomRange.RATIO_1_OR_LARGER
1171                             && direction == ZoomDirection.ZOOM_IN))
1172                             && staticMetadata.isPerFrameControlSupported()) {
1173                         mTestRule.getCollector().expectTrue(
1174                                 "Zoom-in latency improvement (" + improvement
1175                                 + ") must be at least " + ZOOM_IN_MIN_IMPROVEMENT_IN_FRAMES,
1176                                 improvement >= ZOOM_IN_MIN_IMPROVEMENT_IN_FRAMES);
1177                     }
1178                     overrideImprovements[j] = improvement;
1179 
1180                     sequenceId = newSequenceId;
1181                 }
1182 
1183                 int minImprovement = Arrays.stream(overrideImprovements).min().getAsInt();
1184                 int maxImprovement = Arrays.stream(overrideImprovements).max().getAsInt();
1185                 int variation = maxImprovement - minImprovement;
1186                 // To check smooth zoom for V, the latency improvement must not introduce
1187                 // extra pipeline delay variation.
1188                 int maxVariation = checkSmoothZoomForV ? 0 : MAX_IMPROVEMENT_VARIATION;
1189                 assertTrue(
1190                         String.format("Zoom latency improvement variation %d must not exceed %d",
1191                                 variation, maxVariation), variation <= maxVariation);
1192 
1193                 mReportLog.addValues("Camera zoom ratios", ratiosToTest, ResultType.NEUTRAL,
1194                         ResultUnit.NONE);
1195                 mReportLog.addValues("Latency improvements", overrideImprovements,
1196                         ResultType.HIGHER_BETTER, ResultUnit.FRAMES);
1197             } finally {
1198                 mTestRule.closeDefaultImageReader();
1199                 mTestRule.closeDevice(id);
1200                 closePreviewSurface();
1201             }
1202             mReportLog.submit(mInstrumentation);
1203 
1204             if (VERBOSE) {
1205                 Log.v(TAG, "Camera " + id + " zoom settings: " + Arrays.toString(ratiosToTest));
1206                 Log.v(TAG, "Camera " + id + " zoom settings override latency improvements "
1207                         + "(in frames): " + Arrays.toString(overrideImprovements));
1208             }
1209         }
1210     }
1211 
1212     /**
1213      * Testing SurfaceView jitter reduction performance
1214      *
1215      * Because the application doesn't have access to SurfaceView frames,
1216      * we use an ImageReader with COMPOSER_OVERLAY usage.
1217      */
1218     @Test
1219     public void testSurfaceViewJitterReduction() throws Exception {
1220         String cameraId = null;
1221         Range<Integer>[] aeFpsRanges = null;
1222         for (String id : mTestRule.getCameraIdsUnderTest()) {
1223             StaticMetadata staticMetadata = mTestRule.getAllStaticInfo().get(id);
1224             if (staticMetadata.isColorOutputSupported()) {
1225                 cameraId = id;
1226                 aeFpsRanges = staticMetadata.getAeAvailableTargetFpsRangesChecked();
1227                 // Because jitter reduction is a framework feature and not camera specific,
1228                 // we only test for 1 camera Id.
1229                 break;
1230             }
1231         }
1232         if (cameraId == null) {
1233             Log.i(TAG, "No camera supports color outputs, skipping");
1234             return;
1235         }
1236 
1237         try {
1238             mTestRule.openDevice(cameraId);
1239 
1240             for (Range<Integer> fpsRange : aeFpsRanges) {
1241                 if (Objects.equals(fpsRange.getLower(), fpsRange.getUpper())) {
1242                     testPreviewJitterForFpsRange(cameraId,
1243                             HardwareBuffer.USAGE_COMPOSER_OVERLAY,
1244                             /*reduceJitter*/false, fpsRange);
1245 
1246                     testPreviewJitterForFpsRange(cameraId,
1247                             HardwareBuffer.USAGE_COMPOSER_OVERLAY,
1248                             /*reduceJitter*/true, fpsRange);
1249                 }
1250             }
1251         } finally {
1252             mTestRule.closeDevice(cameraId);
1253         }
1254     }
1255 
1256     /**
1257      * Testing SurfaceTexture jitter reduction performance
1258      */
1259     @Test
1260     public void testSurfaceTextureJitterReduction() throws Exception {
1261         String cameraId = null;
1262         Range<Integer>[] aeFpsRanges = null;
1263         for (String id : mTestRule.getCameraIdsUnderTest()) {
1264             StaticMetadata staticMetadata = mTestRule.getAllStaticInfo().get(id);
1265             if (staticMetadata.isColorOutputSupported()) {
1266                 cameraId = id;
1267                 aeFpsRanges = staticMetadata.getAeAvailableTargetFpsRangesChecked();
1268                 // Because jitter reduction is a framework feature and not camera specific,
1269                 // we only test for 1 camera Id.
1270                 break;
1271             }
1272         }
1273         if (cameraId == null) {
1274             Log.i(TAG, "No camera supports color outputs, skipping");
1275             return;
1276         }
1277 
1278         try {
1279             mTestRule.openDevice(cameraId);
1280 
1281             for (Range<Integer> fpsRange : aeFpsRanges) {
1282                 if (Objects.equals(fpsRange.getLower(), fpsRange.getUpper())) {
1283                     testPreviewJitterForFpsRange(cameraId,
1284                             HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE,
1285                             /*reduceJitter*/false, fpsRange);
1286                     testPreviewJitterForFpsRange(cameraId,
1287                             HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE,
1288                             /*reduceJitter*/true, fpsRange);
1289                 }
1290             }
1291         } finally {
1292             mTestRule.closeDevice(cameraId);
1293         }
1294     }
1295 
1296     private void testPreviewJitterForFpsRange(String cameraId, long usage,
1297             boolean reduceJitter, Range<Integer> fpsRange) throws Exception {
1298         try {
1299             assertTrue("usage must be COMPOSER_OVERLAY/GPU_SAMPLED_IMAGE, but is " + usage,
1300                     usage == HardwareBuffer.USAGE_COMPOSER_OVERLAY
1301                     || usage == HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE);
1302             String streamName = "test_camera_preview_jitter_";
1303             if (usage == HardwareBuffer.USAGE_COMPOSER_OVERLAY) {
1304                 streamName += "surface_view";
1305             } else {
1306                 streamName += "surface_texture";
1307             }
1308             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
1309             mReportLog.addValue("camera_id", cameraId, ResultType.NEUTRAL, ResultUnit.NONE);
1310 
1311             // Display refresh rate while camera is active. Note that the default display's
1312             // getRefreshRate() isn't reflecting the real refresh rate. Hardcode it for now.
1313             float refreshRate = 60.0f;
1314             float numRefreshesPerDuration = refreshRate / fpsRange.getLower();
1315             long refreshInterval = (long) (1000000000L / refreshRate);
1316 
1317             Long frameDuration = (long) (1e9 / fpsRange.getLower());
1318             initializeImageReader(cameraId, ImageFormat.PRIVATE,
1319                     frameDuration, usage);
1320 
1321             CameraCharacteristics ch =
1322                     mTestRule.getCameraManager().getCameraCharacteristics(cameraId);
1323             Integer timestampSource = ch.get(CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
1324             assertNotNull("Timestamp source must not be null", timestampSource);
1325 
1326             boolean timestampIsRealtime = false;
1327             if (timestampSource == CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME
1328                     && (!reduceJitter || usage == HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE)) {
1329                 timestampIsRealtime = true;
1330             }
1331             SimpleTimestampListener imageListener =
1332                     new SimpleTimestampListener(timestampIsRealtime);
1333             mTestRule.getReader().setOnImageAvailableListener(
1334                     imageListener, mTestRule.getHandler());
1335 
1336             CaptureRequest.Builder previewBuilder = mTestRule.getCamera().createCaptureRequest(
1337                     CameraDevice.TEMPLATE_PREVIEW);
1338             previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
1339             previewBuilder.addTarget(mTestRule.getReaderSurface());
1340             CaptureRequest previewRequest = previewBuilder.build();
1341 
1342             List<OutputConfiguration> outputConfigs = new ArrayList<>();
1343             OutputConfiguration config = new OutputConfiguration(mTestRule.getReaderSurface());
1344             if (!reduceJitter) {
1345                 config.setTimestampBase(OutputConfiguration.TIMESTAMP_BASE_SENSOR);
1346             }
1347             outputConfigs.add(config);
1348 
1349             boolean useSessionKeys = isFpsRangeASessionKey(ch);
1350             mTestRule.setCameraSessionListener(new BlockingSessionCallback());
1351             configureAndSetCameraSessionWithConfigs(outputConfigs, useSessionKeys, previewRequest);
1352 
1353             // Start preview and run for 6 seconds
1354             SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1355             mTestRule.getCameraSession().setRepeatingRequest(
1356                     previewRequest, resultListener, mTestRule.getHandler());
1357 
1358             Thread.sleep(6000);
1359 
1360             blockingStopRepeating();
1361 
1362             // Let N be expected number of VSYNCs between frames
1363             //
1364             // Number of frames ahead of expected VSYNC: 0.5 * VSYNC < frame duration <=
1365             // (N - 0.5) * VSYNC
1366             long framesAheadCount = 0;
1367             // Number of frames delayed past the expected VSYNC: frame duration >= (N + 0.5) * VSYNC
1368             long framesDelayedCount = 0;
1369             // Number of frames dropped: Fell into one single VSYNC
1370             long framesDroppedCount = 0;
1371             // The number of frame intervals in total
1372             long intervalCount = imageListener.getTimestampCount() - 1;
1373             assertTrue("Number of timestamp intervals must be at least 1, but is " + intervalCount,
1374                     intervalCount >= 1);
1375             // The sum of delays in ms for all frames captured
1376             double framesDelayInMs = 0;
1377 
1378             SimpleTimestampListener.TimestampHolder timestamp1 =
1379                     imageListener.getNextTimestampHolder();
1380             if (usage == HardwareBuffer.USAGE_COMPOSER_OVERLAY) {
1381                 framesDelayInMs =
1382                         Math.max(0, timestamp1.mTimestamp - timestamp1.mDeliveryTime) / 1000000;
1383             } else {
1384                 framesDelayInMs =
1385                         (timestamp1.mDeliveryTime - timestamp1.mTimestamp) / 1000000;
1386             }
1387             for (long i = 0; i < intervalCount; i++) {
1388                 SimpleTimestampListener.TimestampHolder timestamp2 =
1389                         imageListener.getNextTimestampHolder();
1390                 // The listener uses the image timestamp if it's in the future. Otherwise, use
1391                 // the current system time (image delivery time).
1392                 long presentTime2 = Math.max(timestamp2.mDeliveryTime, timestamp2.mTimestamp);
1393                 long presentTime1 = Math.max(timestamp1.mDeliveryTime, timestamp1.mTimestamp);
1394                 long frameInterval = presentTime2 - presentTime1;
1395                 if (frameInterval <= refreshInterval / 2) {
1396                     framesDroppedCount++;
1397                 } else if (frameInterval <= refreshInterval * (numRefreshesPerDuration - 0.5f)) {
1398                     framesAheadCount++;
1399                 } else if (frameInterval >=  refreshInterval * (numRefreshesPerDuration + 0.5f)) {
1400                     framesDelayedCount++;
1401                 }
1402 
1403                 if (usage == HardwareBuffer.USAGE_COMPOSER_OVERLAY) {
1404                     framesDelayInMs +=
1405                             Math.max(0, timestamp2.mTimestamp - timestamp2.mDeliveryTime) / 1000000;
1406                 } else {
1407                     framesDelayInMs +=
1408                             (timestamp1.mDeliveryTime - timestamp1.mTimestamp) / 1000000;
1409                 }
1410                 timestamp1 = timestamp2;
1411             }
1412             imageListener.reset();
1413 
1414             mReportLog.addValue("reduce_jitter", reduceJitter, ResultType.NEUTRAL,
1415                     ResultUnit.NONE);
1416             mReportLog.addValue("camera_configured_frame_rate", fpsRange.getLower(),
1417                     ResultType.NEUTRAL, ResultUnit.NONE);
1418             mReportLog.addValue("camera_preview_frame_dropped_rate",
1419                     1.0f * framesDroppedCount / intervalCount, ResultType.LOWER_BETTER,
1420                     ResultUnit.NONE);
1421             mReportLog.addValue("camera_preview_frame_ahead_rate",
1422                     1.0f * framesAheadCount / intervalCount, ResultType.LOWER_BETTER,
1423                     ResultUnit.NONE);
1424             mReportLog.addValue("camera_preview_frame_delayed_rate",
1425                     1.0f * framesDelayedCount / intervalCount,
1426                     ResultType.LOWER_BETTER, ResultUnit.NONE);
1427             mReportLog.addValue("camera_preview_frame_latency_ms",
1428                     framesDelayInMs / (intervalCount + 1), ResultType.LOWER_BETTER,
1429                     ResultUnit.MS);
1430 
1431             if (VERBOSE) {
1432                 Log.v(TAG, "Camera " + cameraId + " frame rate: " + fpsRange.getLower()
1433                         + ", dropped rate: " + (1.0f * framesDroppedCount / intervalCount)
1434                         + ", ahead rate: " + (1.0f * framesAheadCount / intervalCount)
1435                         + ", delayed rate: " + (1.0f * framesDelayedCount / intervalCount)
1436                         + ", latency in ms: " + (framesDelayInMs / (intervalCount + 1)));
1437             }
1438         } finally {
1439             mTestRule.closeDefaultImageReader();
1440             mReportLog.submit(mInstrumentation);
1441         }
1442     }
1443 
    /**
     * Measures the capture stall introduced by reprocess requests mixed into a
     * ZSL stream for the given input format, and asserts the worst observed
     * timestamp gap stays within the advertised maxCaptureStall bound.
     *
     * @param reprocessInputFormat the reprocess input format (e.g. YUV_420_888
     *        or PRIVATE for opaque reprocessing)
     */
    private void reprocessingCaptureStallTestByCamera(int reprocessInputFormat) throws Exception {
        prepareReprocessCapture(reprocessInputFormat);

        // Let it stream for a while before reprocessing
        startZslStreaming();
        waitForFrames(NUM_RESULTS_WAIT);

        final int NUM_REPROCESS_TESTED = MAX_REPROCESS_IMAGES / 2;
        // Prepare several reprocessing request
        Image[] inputImages = new Image[NUM_REPROCESS_TESTED];
        CaptureRequest.Builder[] reprocessReqs = new CaptureRequest.Builder[MAX_REPROCESS_IMAGES];
        for (int i = 0; i < NUM_REPROCESS_TESTED; i++) {
            // Pair each ZSL image with its matching capture result (by timestamp)
            // to build a valid reprocess request; queue the input image up front.
            inputImages[i] =
                    mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
            TotalCaptureResult zslResult =
                    mZslResultListener.getCaptureResult(
                            WAIT_FOR_RESULT_TIMEOUT_MS, inputImages[i].getTimestamp());
            reprocessReqs[i] = mTestRule.getCamera().createReprocessCaptureRequest(zslResult);
            reprocessReqs[i].addTarget(mJpegReader.getSurface());
            // High-quality NR/edge modes maximize processing load, i.e. worst-case stall.
            reprocessReqs[i].set(CaptureRequest.NOISE_REDUCTION_MODE,
                    CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
            reprocessReqs[i].set(CaptureRequest.EDGE_MODE,
                    CaptureRequest.EDGE_MODE_HIGH_QUALITY);
            mWriter.queueInputImage(inputImages[i]);
        }

        double[] maxCaptureGapsMs = new double[NUM_REPROCESS_TESTED];
        double[] averageFrameDurationMs = new double[NUM_REPROCESS_TESTED];
        Arrays.fill(averageFrameDurationMs, 0.0);
        final int MAX_REPROCESS_RETURN_FRAME_COUNT = 20;
        SimpleCaptureCallback reprocessResultListener = new SimpleCaptureCallback();
        for (int i = 0; i < NUM_REPROCESS_TESTED; i++) {
            mZslResultListener.drain();
            CaptureRequest reprocessRequest = reprocessReqs[i].build();
            mTestRule.getCameraSession().capture(
                    reprocessRequest, reprocessResultListener, mTestRule.getHandler());
            // Wait for reprocess output jpeg and result come back.
            reprocessResultListener.getCaptureResultForRequest(reprocessRequest,
                    CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
            mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS).close();
            long numFramesMaybeStalled = mZslResultListener.getTotalNumFrames();
            assertTrue("Reprocess capture result should be returned in "
                            + MAX_REPROCESS_RETURN_FRAME_COUNT + " frames",
                    numFramesMaybeStalled <= MAX_REPROCESS_RETURN_FRAME_COUNT);

            // Need look longer time, as the stutter could happen after the reprocessing
            // output frame is received.
            long[] timestampGap = new long[MAX_REPROCESS_RETURN_FRAME_COUNT + 1];
            Arrays.fill(timestampGap, 0);
            CaptureResult[] results = new CaptureResult[timestampGap.length];
            long[] frameDurationsNs = new long[timestampGap.length];
            for (int j = 0; j < results.length; j++) {
                results[j] = mZslResultListener.getCaptureResult(
                        CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                if (j > 0) {
                    // Gap between consecutive sensor timestamps of the ZSL stream.
                    timestampGap[j] = results[j].get(CaptureResult.SENSOR_TIMESTAMP) -
                            results[j - 1].get(CaptureResult.SENSOR_TIMESTAMP);
                    assertTrue("Time stamp should be monotonically increasing",
                            timestampGap[j] > 0);
                }
                frameDurationsNs[j] = results[j].get(CaptureResult.SENSOR_FRAME_DURATION);
            }

            if (VERBOSE) {
                Log.i(TAG, "timestampGap: " + Arrays.toString(timestampGap));
                Log.i(TAG, "frameDurationsNs: " + Arrays.toString(frameDurationsNs));
            }

            // Get the number of candidate results, calculate the average frame duration
            // and max timestamp gap.
            Arrays.sort(timestampGap);
            double maxTimestampGapMs = timestampGap[timestampGap.length - 1] / 1000000.0;
            for (int m = 0; m < frameDurationsNs.length; m++) {
                averageFrameDurationMs[i] += (frameDurationsNs[m] / 1000000.0);
            }
            averageFrameDurationMs[i] /= frameDurationsNs.length;

            maxCaptureGapsMs[i] = maxTimestampGapMs;
        }

        blockingStopRepeating();

        String reprocessType = "YUV reprocessing";
        if (reprocessInputFormat == ImageFormat.PRIVATE) {
            reprocessType = "opaque reprocessing";
        }
        mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL, ResultUnit.NONE);
        mReportLog.addValues("max_capture_timestamp_gaps", maxCaptureGapsMs,
                ResultType.LOWER_BETTER, ResultUnit.MS);
        mReportLog.addValues("capture_average_frame_duration", averageFrameDurationMs,
                ResultType.LOWER_BETTER, ResultUnit.MS);
        mReportLog.setSummary("camera_reprocessing_average_max_capture_timestamp_gaps",
                Stat.getAverage(maxCaptureGapsMs), ResultType.LOWER_BETTER, ResultUnit.MS);

        // The max timestamp gap should be less than (captureStall + 1) x average frame
        // duration * (1 + error margin).
        int maxCaptureStallFrames = mTestRule.getStaticInfo().getMaxCaptureStallOrDefault();
        for (int i = 0; i < maxCaptureGapsMs.length; i++) {
            double stallDurationBound = averageFrameDurationMs[i] *
                    (maxCaptureStallFrames + 1) * (1 + REPROCESS_STALL_MARGIN);
            assertTrue("max capture stall duration should be no larger than " + stallDurationBound,
                    maxCaptureGapsMs[i] <= stallDurationBound);
        }
    }
1548 
1549     private void reprocessingPerformanceTestByCamera(int reprocessInputFormat, boolean asyncMode,
1550             boolean requireHighQuality)
1551             throws Exception {
1552         // Prepare the reprocessing capture
1553         prepareReprocessCapture(reprocessInputFormat);
1554 
1555         // Start ZSL streaming
1556         startZslStreaming();
1557         waitForFrames(NUM_RESULTS_WAIT);
1558 
1559         CaptureRequest.Builder[] reprocessReqs = new CaptureRequest.Builder[MAX_REPROCESS_IMAGES];
1560         Image[] inputImages = new Image[MAX_REPROCESS_IMAGES];
1561         double[] getImageLatenciesMs = new double[MAX_REPROCESS_IMAGES];
1562         long startTimeMs;
1563         for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
1564             inputImages[i] =
1565                     mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
1566             TotalCaptureResult zslResult =
1567                     mZslResultListener.getCaptureResult(
1568                             WAIT_FOR_RESULT_TIMEOUT_MS, inputImages[i].getTimestamp());
1569             reprocessReqs[i] = mTestRule.getCamera().createReprocessCaptureRequest(zslResult);
1570             if (requireHighQuality) {
1571                 // Reprocessing should support high quality for NR and edge modes.
1572                 reprocessReqs[i].set(CaptureRequest.NOISE_REDUCTION_MODE,
1573                         CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
1574                 reprocessReqs[i].set(CaptureRequest.EDGE_MODE,
1575                         CaptureRequest.EDGE_MODE_HIGH_QUALITY);
1576             }
1577             reprocessReqs[i].addTarget(mJpegReader.getSurface());
1578         }
1579 
1580         if (asyncMode) {
1581             // async capture: issue all the reprocess requests as quick as possible, then
1582             // check the throughput of the output jpegs.
1583             for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
1584                 // Could be slow for YUV reprocessing, do it in advance.
1585                 mWriter.queueInputImage(inputImages[i]);
1586             }
1587 
1588             // Submit the requests
1589             for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
1590                 mTestRule.getCameraSession().capture(reprocessReqs[i].build(), null, null);
1591             }
1592 
1593             // Get images
1594             startTimeMs = SystemClock.elapsedRealtime();
1595             Image jpegImages[] = new Image[MAX_REPROCESS_IMAGES];
1596             for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
1597                 jpegImages[i] = mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
1598                 getImageLatenciesMs[i] = SystemClock.elapsedRealtime() - startTimeMs;
1599                 startTimeMs = SystemClock.elapsedRealtime();
1600             }
1601             for (Image i : jpegImages) {
1602                 i.close();
1603             }
1604         } else {
1605             // sync capture: issue reprocess request one by one, only submit next one when
1606             // the previous capture image is returned. This is to test the back to back capture
1607             // performance.
1608             Image jpegImages[] = new Image[MAX_REPROCESS_IMAGES];
1609             for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
1610                 startTimeMs = SystemClock.elapsedRealtime();
1611                 mWriter.queueInputImage(inputImages[i]);
1612                 mTestRule.getCameraSession().capture(reprocessReqs[i].build(), null, null);
1613                 jpegImages[i] = mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
1614                 getImageLatenciesMs[i] = SystemClock.elapsedRealtime() - startTimeMs;
1615             }
1616             for (Image i : jpegImages) {
1617                 i.close();
1618             }
1619         }
1620 
1621         blockingStopRepeating();
1622 
1623         String reprocessType = "YUV reprocessing";
1624         if (reprocessInputFormat == ImageFormat.PRIVATE) {
1625             reprocessType = "opaque reprocessing";
1626         }
1627 
1628         // Report the performance data
1629         String captureMsg;
1630         if (asyncMode) {
1631             captureMsg = "capture latency";
1632             if (requireHighQuality) {
1633                 captureMsg += " for High Quality noise reduction and edge modes";
1634             }
1635             mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL,
1636                     ResultUnit.NONE);
1637             mReportLog.addValue("capture_message", captureMsg, ResultType.NEUTRAL,
1638                     ResultUnit.NONE);
1639             mReportLog.addValues("latency", getImageLatenciesMs, ResultType.LOWER_BETTER,
1640                     ResultUnit.MS);
1641             mReportLog.setSummary("camera_reprocessing_average_latency",
1642                     Stat.getAverage(getImageLatenciesMs), ResultType.LOWER_BETTER, ResultUnit.MS);
1643         } else {
1644             captureMsg = "shot to shot latency";
1645             if (requireHighQuality) {
1646                 captureMsg += " for High Quality noise reduction and edge modes";
1647             }
1648             mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL,
1649                     ResultUnit.NONE);
1650             mReportLog.addValue("capture_message", captureMsg, ResultType.NEUTRAL,
1651                     ResultUnit.NONE);
1652             mReportLog.addValues("latency", getImageLatenciesMs, ResultType.LOWER_BETTER,
1653                     ResultUnit.MS);
1654             mReportLog.setSummary("camera_reprocessing_shot_to_shot_average_latency",
1655                     Stat.getAverage(getImageLatenciesMs), ResultType.LOWER_BETTER, ResultUnit.MS);
1656         }
1657     }
1658 
1659     /**
1660      * Start preview and ZSL streaming
1661      */
1662     private void startZslStreaming() throws Exception {
1663         CaptureRequest.Builder zslBuilder =
1664                 mTestRule.getCamera().createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
1665         zslBuilder.addTarget(mPreviewSurface);
1666         zslBuilder.addTarget(mCameraZslReader.getSurface());
1667         mTestRule.getCameraSession().setRepeatingRequest(
1668                 zslBuilder.build(), mZslResultListener, mTestRule.getHandler());
1669     }
1670 
1671     /**
1672      * Wait for a certain number of frames, the images and results will be drained from the
1673      * listeners to make sure that next reprocessing can get matched results and images.
1674      *
1675      * @param numFrameWait The number of frames to wait before return, 0 means that
1676      *      this call returns immediately after streaming on.
1677      */
1678     private void waitForFrames(int numFrameWait) throws Exception {
1679         if (numFrameWait < 0) {
1680             throw new IllegalArgumentException("numFrameWait " + numFrameWait +
1681                     " should be non-negative");
1682         }
1683 
1684         for (int i = 0; i < numFrameWait; i++) {
1685             mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS).close();
1686         }
1687     }
1688 
1689     private void closeReaderWriters() {
1690         mCameraZslImageListener.drain();
1691         CameraTestUtils.closeImageReader(mCameraZslReader);
1692         mCameraZslReader = null;
1693         mJpegListener.drain();
1694         CameraTestUtils.closeImageReader(mJpegReader);
1695         mJpegReader = null;
1696         CameraTestUtils.closeImageWriter(mWriter);
1697         mWriter = null;
1698     }
1699 
1700     private void prepareReprocessCapture(int inputFormat)
1701             throws CameraAccessException {
1702         // 1. Find the right preview and capture sizes.
1703         Size maxPreviewSize = mTestRule.getOrderedPreviewSizes().get(0);
1704         Size[] supportedInputSizes =
1705                 mTestRule.getStaticInfo().getAvailableSizesForFormatChecked(inputFormat,
1706                         StaticMetadata.StreamDirection.Input);
1707         Size maxInputSize = CameraTestUtils.getMaxSize(supportedInputSizes);
1708         Size maxJpegSize = mTestRule.getOrderedStillSizes().get(0);
1709         updatePreviewSurface(maxPreviewSize);
1710         mZslResultListener = new SimpleCaptureCallback();
1711 
1712         // 2. Create camera output ImageReaders.
1713         // YUV/Opaque output, camera should support output with input size/format
1714         mCameraZslImageListener = new SimpleImageReaderListener(
1715                 /*asyncMode*/true, MAX_ZSL_IMAGES - MAX_REPROCESS_IMAGES);
1716         mCameraZslReader = CameraTestUtils.makeImageReader(
1717                 maxInputSize, inputFormat, MAX_ZSL_IMAGES,
1718                 mCameraZslImageListener, mTestRule.getHandler());
1719         // Jpeg reprocess output
1720         mJpegListener = new SimpleImageReaderListener();
1721         mJpegReader = CameraTestUtils.makeImageReader(
1722                 maxJpegSize, ImageFormat.JPEG, MAX_JPEG_IMAGES,
1723                 mJpegListener, mTestRule.getHandler());
1724 
1725         // create camera reprocess session
1726         List<Surface> outSurfaces = new ArrayList<Surface>();
1727         outSurfaces.add(mPreviewSurface);
1728         outSurfaces.add(mCameraZslReader.getSurface());
1729         outSurfaces.add(mJpegReader.getSurface());
1730         InputConfiguration inputConfig = new InputConfiguration(maxInputSize.getWidth(),
1731                 maxInputSize.getHeight(), inputFormat);
1732         mTestRule.setCameraSessionListener(new BlockingSessionCallback());
1733         mTestRule.setCameraSession(CameraTestUtils.configureReprocessableCameraSession(
1734                 mTestRule.getCamera(), inputConfig, outSurfaces,
1735                 mTestRule.getCameraSessionListener(), mTestRule.getHandler()));
1736 
1737         // 3. Create ImageWriter for input
1738         mWriter = CameraTestUtils.makeImageWriter(
1739                 mTestRule.getCameraSession().getInputSurface(), MAX_INPUT_IMAGES,
1740                 /*listener*/null, /*handler*/null);
1741     }
1742 
1743     /**
1744      * Stop repeating requests for current camera and waiting for it to go back to idle, resulting
1745      * in an idle device.
1746      */
1747     private void blockingStopRepeating() throws Exception {
1748         stopRepeating();
1749         mTestRule.getCameraSessionListener().getStateWaiter().waitForState(
1750                 BlockingSessionCallback.SESSION_READY, CameraTestUtils.CAMERA_IDLE_TIMEOUT_MS);
1751     }
1752 
    /**
     * Start a repeating preview request and block until the first image arrives.
     *
     * @param id the camera id under test (unused here; kept for signature
     *        consistency with sibling helpers)
     * @param listener capture callback for preview results
     * @param previewRequest the repeating request to submit
     * @param imageListener listener to wait on for the first available image
     */
    private void blockingStartPreview(String id, CaptureCallback listener,
            CaptureRequest previewRequest, SimpleImageListener imageListener)
            throws Exception {
        mTestRule.getCameraSession().setRepeatingRequest(
                previewRequest, listener, mTestRule.getHandler());
        // Block until the pipeline actually produces a frame.
        imageListener.waitForImageAvailable(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
    }
1760 
1761     /**
1762      * Setup still capture configuration and start preview.
1763      *
1764      * @param id The camera id under test
1765      * @param previewBuilder The capture request builder to be used for preview
1766      * @param stillBuilder The capture request builder to be used for still capture
1767      * @param previewSz Preview size
1768      * @param captureSizes Still capture sizes
1769      * @param formats The single capture image formats
1770      * @param resultListener Capture result listener
1771      * @param maxNumImages The max number of images set to the image reader
1772      * @param imageListeners The single capture capture image listeners
1773      * @param enablePostView Enable post view as part of the still capture request
1774      */
1775     private ImageReader[] prepareStillCaptureAndStartPreview(String id,
1776             CaptureRequest.Builder previewBuilder, CaptureRequest.Builder stillBuilder,
1777             Size previewSz, Size[] captureSizes, int[] formats, CaptureCallback resultListener,
1778             int maxNumImages, ImageReader.OnImageAvailableListener[] imageListeners,
1779             boolean enablePostView)
1780             throws Exception {
1781 
1782         if ((captureSizes == null) || (formats == null) || (imageListeners == null) &&
1783                 (captureSizes.length != formats.length) ||
1784                 (formats.length != imageListeners.length)) {
1785             throw new IllegalArgumentException("Invalid capture sizes/formats or image listeners!");
1786         }
1787 
1788         if (VERBOSE) {
1789             Log.v(TAG, String.format("Prepare still capture and preview (%s)",
1790                     previewSz.toString()));
1791         }
1792 
1793         // Update preview size.
1794         updatePreviewSurface(previewSz);
1795 
1796         ImageReader[] readers = new ImageReader[captureSizes.length];
1797         List<Surface> outputSurfaces = new ArrayList<Surface>();
1798         outputSurfaces.add(mPreviewSurface);
1799         for (int i = 0; i < captureSizes.length; i++) {
1800             readers[i] = CameraTestUtils.makeImageReader(captureSizes[i], formats[i], maxNumImages,
1801                     imageListeners[i], mTestRule.getHandler());
1802             outputSurfaces.add(readers[i].getSurface());
1803         }
1804 
1805         // Configure the requests.
1806         previewBuilder.addTarget(mPreviewSurface);
1807         if (enablePostView)
1808             stillBuilder.addTarget(mPreviewSurface);
1809         for (int i = 0; i < readers.length; i++) {
1810             stillBuilder.addTarget(readers[i].getSurface());
1811         }
1812 
1813         // Update target fps based on the min frame duration of preview.
1814         CameraCharacteristics ch = mTestRule.getStaticInfo().getCharacteristics();
1815         StreamConfigurationMap config = ch.get(
1816                 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
1817         long minFrameDuration = Math.max(FRAME_DURATION_NS_30FPS, config.getOutputMinFrameDuration(
1818                 SurfaceTexture.class, previewSz));
1819         Range<Integer> targetRange =
1820                 CameraTestUtils.getSuitableFpsRangeForDuration(id,
1821                 minFrameDuration, mTestRule.getStaticInfo());
1822         previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
1823         stillBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
1824 
1825         CaptureRequest previewRequest = previewBuilder.build();
1826         mTestRule.setCameraSessionListener(new BlockingSessionCallback());
1827         boolean useSessionKeys = isFpsRangeASessionKey(ch);
1828         configureAndSetCameraSession(outputSurfaces, useSessionKeys, previewRequest);
1829 
1830         // Start preview.
1831         mTestRule.getCameraSession().setRepeatingRequest(
1832                 previewRequest, resultListener, mTestRule.getHandler());
1833 
1834         return readers;
1835     }
1836 
1837     /**
1838      * Helper function to check if TARGET_FPS_RANGE is a session parameter
1839      */
1840     private boolean isFpsRangeASessionKey(CameraCharacteristics ch) {
1841         List<CaptureRequest.Key<?>> sessionKeys = ch.getAvailableSessionKeys();
1842         return sessionKeys != null &&
1843                 sessionKeys.contains(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE);
1844     }
1845 
1846     /**
1847      * Helper function to configure camera session using parameters provided.
1848      */
1849     private void configureAndSetCameraSession(List<Surface> surfaces,
1850             boolean useInitialRequest, CaptureRequest initialRequest)
1851             throws CameraAccessException {
1852         CameraCaptureSession cameraSession;
1853         if (useInitialRequest) {
1854             cameraSession = CameraTestUtils.configureCameraSessionWithParameters(
1855                 mTestRule.getCamera(), surfaces,
1856                 mTestRule.getCameraSessionListener(), mTestRule.getHandler(),
1857                 initialRequest);
1858         } else {
1859             cameraSession = CameraTestUtils.configureCameraSession(
1860                 mTestRule.getCamera(), surfaces,
1861                 mTestRule.getCameraSessionListener(), mTestRule.getHandler());
1862         }
1863         mTestRule.setCameraSession(cameraSession);
1864     }
1865 
1866     /*
1867      * Helper function to configure camera session using parameters provided.
1868      */
1869     private void configureAndSetCameraSessionWithConfigs(List<OutputConfiguration> configs,
1870             boolean useInitialRequest, CaptureRequest initialRequest)
1871             throws CameraAccessException {
1872         CameraCaptureSession cameraSession;
1873         if (useInitialRequest) {
1874             cameraSession = CameraTestUtils.tryConfigureCameraSessionWithConfig(
1875                 mTestRule.getCamera(), configs, initialRequest,
1876                 mTestRule.getCameraSessionListener(), mTestRule.getHandler());
1877         } else {
1878             cameraSession = CameraTestUtils.configureCameraSessionWithConfig(
1879                 mTestRule.getCamera(), configs,
1880                 mTestRule.getCameraSessionListener(), mTestRule.getHandler());
1881         }
1882         mTestRule.setCameraSession(cameraSession);
1883     }
1884 
1885     /**
1886      * Setup single capture configuration and start preview.
1887      *
1888      * @param previewBuilder The capture request builder to be used for preview
1889      * @param stillBuilder The capture request builder to be used for still capture
1890      * @param previewSz Preview size
1891      * @param captureSz Still capture size
1892      * @param format The single capture image format
1893      * @param resultListener Capture result listener
1894      * @param sessionListener Session listener
1895      * @param maxNumImages The max number of images set to the image reader
1896      * @param imageListener The single capture capture image listener
1897      * @param useSessionKeys Create capture session using session keys from previewRequest
1898      */
1899     private void prepareCaptureAndStartPreview(CaptureRequest.Builder previewBuilder,
1900             CaptureRequest.Builder stillBuilder, Size previewSz, Size captureSz, int format,
1901             CaptureCallback resultListener, CameraCaptureSession.StateCallback sessionListener,
1902             int maxNumImages, ImageReader.OnImageAvailableListener imageListener,
1903             boolean  useSessionKeys) throws Exception {
1904         if ((captureSz == null) || (imageListener == null)) {
1905             throw new IllegalArgumentException("Invalid capture size or image listener!");
1906         }
1907 
1908         if (VERBOSE) {
1909             Log.v(TAG, String.format("Prepare single capture (%s) and preview (%s)",
1910                     captureSz.toString(), previewSz.toString()));
1911         }
1912 
1913         // Update preview size.
1914         updatePreviewSurface(previewSz);
1915 
1916         // Create ImageReader.
1917         mTestRule.createDefaultImageReader(captureSz, format, maxNumImages, imageListener);
1918 
1919         // Configure output streams with preview and jpeg streams.
1920         List<Surface> outputSurfaces = new ArrayList<Surface>();
1921         outputSurfaces.add(mPreviewSurface);
1922         outputSurfaces.add(mTestRule.getReaderSurface());
1923         if (sessionListener == null) {
1924             mTestRule.setCameraSessionListener(new BlockingSessionCallback());
1925         } else {
1926             mTestRule.setCameraSessionListener(new BlockingSessionCallback(sessionListener));
1927         }
1928 
1929         // Configure the requests.
1930         previewBuilder.addTarget(mPreviewSurface);
1931         stillBuilder.addTarget(mPreviewSurface);
1932         stillBuilder.addTarget(mTestRule.getReaderSurface());
1933         CaptureRequest previewRequest = previewBuilder.build();
1934 
1935         configureAndSetCameraSession(outputSurfaces, useSessionKeys, previewRequest);
1936 
1937         // Start preview.
1938         mTestRule.getCameraSession().setRepeatingRequest(
1939                 previewRequest, resultListener, mTestRule.getHandler());
1940     }
1941 
1942     /**
1943      * Update the preview surface size.
1944      *
1945      * @param size The preview size to be updated.
1946      */
1947     private void updatePreviewSurface(Size size) {
1948         if ((mPreviewSurfaceTexture != null ) || (mPreviewSurface != null)) {
1949             closePreviewSurface();
1950         }
1951 
1952         mPreviewSurfaceTexture = new SurfaceTexture(/*random int*/ 1);
1953         mPreviewSurfaceTexture.setDefaultBufferSize(size.getWidth(), size.getHeight());
1954         mPreviewSurface = new Surface(mPreviewSurfaceTexture);
1955     }
1956 
1957     /**
1958      * Release preview surface and corresponding surface texture.
1959      */
1960     private void closePreviewSurface() {
1961         if (mPreviewSurface != null) {
1962             mPreviewSurface.release();
1963             mPreviewSurface = null;
1964         }
1965 
1966         if (mPreviewSurfaceTexture != null) {
1967             mPreviewSurfaceTexture.release();
1968             mPreviewSurfaceTexture = null;
1969         }
1970     }
1971 
1972     private boolean isReprocessSupported(String cameraId, int format)
1973             throws CameraAccessException {
1974         if (format != ImageFormat.YUV_420_888 && format != ImageFormat.PRIVATE) {
1975             throw new IllegalArgumentException(
1976                     "format " + format + " is not supported for reprocessing");
1977         }
1978 
1979         StaticMetadata info = new StaticMetadata(
1980                 mTestRule.getCameraManager().getCameraCharacteristics(cameraId), CheckLevel.ASSERT,
1981                 /*collector*/ null);
1982         int cap = CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING;
1983         if (format == ImageFormat.PRIVATE) {
1984             cap = CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING;
1985         }
1986         return info.isCapabilitySupported(cap);
1987     }
1988 
1989     /**
1990      * Stop the repeating requests of current camera.
1991      * Does _not_ wait for the device to go idle
1992      */
1993     private void stopRepeating() throws Exception {
1994         // Stop repeat, wait for captures to complete, and disconnect from surfaces
1995         if (mTestRule.getCameraSession() != null) {
1996             if (VERBOSE) Log.v(TAG, "Stopping preview");
1997             mTestRule.getCameraSession().stopRepeating();
1998         }
1999     }
2000 
2001     /**
2002      * Configure reader and preview outputs and wait until done.
2003      *
2004      * @return The preview capture request
2005      */
2006     private CaptureRequest configureReaderAndPreviewOutputs(
2007             String id, boolean isColorOutputSupported)
2008             throws Exception {
2009         if (mPreviewSurface == null || mTestRule.getReaderSurface() == null) {
2010             throw new IllegalStateException("preview and reader surface must be initilized first");
2011         }
2012 
2013         // Create previewBuilder
2014         CaptureRequest.Builder previewBuilder =
2015                 mTestRule.getCamera().createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2016         if (isColorOutputSupported) {
2017             previewBuilder.addTarget(mPreviewSurface);
2018         }
2019         previewBuilder.addTarget(mTestRule.getReaderSurface());
2020 
2021 
2022         // Figure out constant target FPS range no larger than 30fps
2023         CameraCharacteristics ch = mTestRule.getStaticInfo().getCharacteristics();
2024         StreamConfigurationMap config =
2025                 ch.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
2026         long minFrameDuration = Math.max(FRAME_DURATION_NS_30FPS,
2027                 config.getOutputMinFrameDuration(mImageReaderFormat, mPreviewSize));
2028 
2029         List<Surface> outputSurfaces = new ArrayList<>();
2030         outputSurfaces.add(mTestRule.getReaderSurface());
2031         if (isColorOutputSupported) {
2032             outputSurfaces.add(mPreviewSurface);
2033             minFrameDuration = Math.max(minFrameDuration,
2034                     config.getOutputMinFrameDuration(SurfaceTexture.class, mPreviewSize));
2035         }
2036         Range<Integer> targetRange =
2037                 CameraTestUtils.getSuitableFpsRangeForDuration(id,
2038                         minFrameDuration, mTestRule.getStaticInfo());
2039         previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
2040 
2041         // Create capture session
2042         boolean useSessionKeys = isFpsRangeASessionKey(ch);
2043         CaptureRequest previewRequest = previewBuilder.build();
2044         mTestRule.setCameraSessionListener(new BlockingSessionCallback());
2045         configureAndSetCameraSession(outputSurfaces, useSessionKeys, previewRequest);
2046 
2047         return previewRequest;
2048     }
2049 
2050     /**
2051      * Configure preview outputs and wait until done.
2052      *
2053      * @return The preview capture request builder
2054      */
2055     private CaptureRequest.Builder configurePreviewOutputs(String id)
2056             throws Exception {
2057         if (mPreviewSurface == null) {
2058             throw new IllegalStateException("preview surface must be initialized first");
2059         }
2060 
2061         // Create previewBuilder
2062         CaptureRequest.Builder previewBuilder =
2063                 mTestRule.getCamera().createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
2064         previewBuilder.addTarget(mPreviewSurface);
2065 
2066         // Figure out constant target FPS range no larger than 30fps
2067         CameraCharacteristics ch = mTestRule.getStaticInfo().getCharacteristics();
2068         StreamConfigurationMap config =
2069                 ch.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
2070         long minFrameDuration = Math.max(FRAME_DURATION_NS_30FPS,
2071                 config.getOutputMinFrameDuration(SurfaceTexture.class, mPreviewSize));
2072 
2073         List<Surface> outputSurfaces = new ArrayList<>();
2074         outputSurfaces.add(mPreviewSurface);
2075         Range<Integer> targetRange =
2076                 CameraTestUtils.getSuitableFpsRangeForDuration(id,
2077                         minFrameDuration, mTestRule.getStaticInfo());
2078         previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
2079 
2080         // Create capture session
2081         boolean useSessionKeys = isFpsRangeASessionKey(ch);
2082         CaptureRequest previewRequest = previewBuilder.build();
2083         mTestRule.setCameraSessionListener(new BlockingSessionCallback());
2084         configureAndSetCameraSession(outputSurfaces, useSessionKeys, previewRequest);
2085 
2086         return previewBuilder;
2087     }
2088 
2089     /**
2090      * Initialize the ImageReader instance and preview surface.
2091      * @param cameraId The camera to be opened.
2092      * @param format The format used to create ImageReader instance.
2093      */
2094     private void initializeImageReader(String cameraId, int format) throws Exception {
2095         initializeImageReader(cameraId, format, null/*maxFrameDuration*/, 0/*usage*/);
2096     }
2097 
2098     /**
2099      * Initialize the ImageReader instance and preview surface.
2100      * @param cameraId The camera to be opened.
2101      * @param format The format used to create ImageReader instance.
2102      * @param frameDuration The min frame duration of the ImageReader cannot be larger than
2103      *                      frameDuration.
2104      * @param usage The usage of the ImageReader
2105      */
2106     private void initializeImageReader(String cameraId, int format, Long frameDuration, long usage)
2107             throws Exception {
2108         List<Size> boundedSizes = CameraTestUtils.getSortedSizesForFormat(
2109                 cameraId, mTestRule.getCameraManager(), format,
2110                 CameraTestUtils.getPreviewSizeBound(mTestRule.getWindowManager(),
2111                         CameraTestUtils.PREVIEW_SIZE_BOUND));
2112 
2113         // Remove the sizes not meeting the frame duration requirement.
2114         final float kFrameDurationTolerance = 0.01f;
2115         if (frameDuration != null) {
2116             StreamConfigurationMap configMap = mTestRule.getStaticInfo().getValueFromKeyNonNull(
2117                     CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
2118             ListIterator<Size> iter = boundedSizes.listIterator();
2119             while (iter.hasNext()) {
2120                 long duration = configMap.getOutputMinFrameDuration(format, iter.next());
2121                 if (duration > frameDuration * (1 + kFrameDurationTolerance)) {
2122                     iter.remove();
2123                 }
2124             }
2125         }
2126 
2127         mTestRule.setOrderedPreviewSizes(boundedSizes);
2128         mPreviewSize = mTestRule.getOrderedPreviewSizes().get(0);
2129         mImageReaderFormat = format;
2130         if (usage != 0) {
2131             mTestRule.createDefaultImageReader(
2132                     mPreviewSize, format, NUM_MAX_IMAGES, usage, /*listener*/null);
2133         } else {
2134             mTestRule.createDefaultImageReader(
2135                     mPreviewSize, format, NUM_MAX_IMAGES, /*listener*/null);
2136         }
2137     }
2138 
    /**
     * Open the given camera, register its id with the collector, and cache its
     * static metadata on the test rule.
     *
     * @param cameraId The camera to open.
     */
    private void simpleOpenCamera(String cameraId) throws Exception {
        mTestRule.setCamera(CameraTestUtils.openCamera(
                mTestRule.getCameraManager(), cameraId,
                mTestRule.getCameraListener(), mTestRule.getHandler()));
        mTestRule.getCollector().setCameraId(cameraId);
        mTestRule.setStaticInfo(new StaticMetadata(
                mTestRule.getCameraManager().getCameraCharacteristics(cameraId),
                CheckLevel.ASSERT, /*collector*/null));
    }
2148 
2149     /**
2150      * Simple image listener that can be used to time the availability of first image.
2151      *
2152      */
2153     private static class SimpleImageListener implements ImageReader.OnImageAvailableListener {
2154         private ConditionVariable imageAvailable = new ConditionVariable();
2155         private boolean imageReceived = false;
2156         private long mTimeReceivedImage = 0;
2157 
2158         @Override
2159         public void onImageAvailable(ImageReader reader) {
2160             Image image = null;
2161             if (!imageReceived) {
2162                 if (VERBOSE) {
2163                     Log.v(TAG, "First image arrives");
2164                 }
2165                 imageReceived = true;
2166                 mTimeReceivedImage = SystemClock.elapsedRealtime();
2167                 imageAvailable.open();
2168             }
2169             image = reader.acquireNextImage();
2170             if (image != null) {
2171                 image.close();
2172             }
2173         }
2174 
2175         /**
2176          * Wait for image available, return immediately if the image was already
2177          * received, otherwise wait until an image arrives.
2178          */
2179         public void waitForImageAvailable(long timeout) {
2180             if (imageReceived) {
2181                 imageReceived = false;
2182                 return;
2183             }
2184 
2185             if (imageAvailable.block(timeout)) {
2186                 imageAvailable.close();
2187                 imageReceived = true;
2188             } else {
2189                 throw new TimeoutRuntimeException("Unable to get the first image after "
2190                         + CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS + "ms");
2191             }
2192         }
2193 
2194         public long getTimeReceivedImage() {
2195             return mTimeReceivedImage;
2196         }
2197     }
2198 
2199     /**
2200      * Simple image listener that behaves like a SurfaceView.
2201      */
2202     private static class SimpleTimestampListener
2203             implements ImageReader.OnImageAvailableListener {
2204         public static class TimestampHolder {
2205             public long mDeliveryTime;
2206             public long mTimestamp;
2207             TimestampHolder(long deliveryTime, long timestamp) {
2208                 mDeliveryTime = deliveryTime;
2209                 mTimestamp = timestamp;
2210             }
2211         }
2212 
2213         private final boolean mUseRealtime;
2214 
2215         private final LinkedBlockingQueue<TimestampHolder> mTimestampQueue =
2216                 new LinkedBlockingQueue<TimestampHolder>();
2217 
2218         private boolean mReaderIsValid = true;
2219 
2220         SimpleTimestampListener(boolean timestampIsRealtime) {
2221             mUseRealtime = timestampIsRealtime;
2222         }
2223 
2224         @Override
2225         public synchronized void onImageAvailable(ImageReader reader) {
2226             if (!mReaderIsValid) return;
2227 
2228             try {
2229                 Image image = null;
2230                 image = reader.acquireNextImage();
2231                 if (image != null) {
2232                     long timestamp = image.getTimestamp();
2233                     long currentTimeMillis = mUseRealtime
2234                             ? SystemClock.elapsedRealtime() : SystemClock.uptimeMillis();
2235                     long currentTimeNs = currentTimeMillis * 1000000;
2236                     mTimestampQueue.put(new TimestampHolder(currentTimeNs, timestamp));
2237                     image.close();
2238                 }
2239             } catch (InterruptedException e) {
2240                 throw new UnsupportedOperationException(
2241                         "Can't handle InterruptedException in onImageAvailable");
2242             }
2243         }
2244 
2245         /**
2246          * Get the number of timestamps
2247          */
2248         public int getTimestampCount() {
2249             return mTimestampQueue.size();
2250         }
2251 
2252         /**
2253          * Get the timestamps for next image received.
2254          */
2255         public TimestampHolder getNextTimestampHolder() {
2256             TimestampHolder holder = mTimestampQueue.poll();
2257             return holder;
2258         }
2259 
2260         /**
2261          * Reset the listener to stop handling callbacks.
2262          */
2263         public synchronized void reset() {
2264             mReaderIsValid = false;
2265         }
2266     }
2267 
2268     private static class SimpleTimingResultListener
2269             extends CameraCaptureSession.CaptureCallback {
2270         private final LinkedBlockingQueue<Pair<CaptureResult, Long> > mPartialResultQueue =
2271                 new LinkedBlockingQueue<Pair<CaptureResult, Long> >();
2272         private final LinkedBlockingQueue<Pair<CaptureResult, Long> > mResultQueue =
2273                 new LinkedBlockingQueue<Pair<CaptureResult, Long> > ();
2274 
2275         @Override
2276         public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
2277                 TotalCaptureResult result) {
2278             try {
2279                 Long time = SystemClock.elapsedRealtime();
2280                 mResultQueue.put(new Pair<CaptureResult, Long>(result, time));
2281             } catch (InterruptedException e) {
2282                 throw new UnsupportedOperationException(
2283                         "Can't handle InterruptedException in onCaptureCompleted");
2284             }
2285         }
2286 
2287         @Override
2288         public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
2289                 CaptureResult partialResult) {
2290             try {
2291                 // check if AE and AF state exists
2292                 Long time = -1L;
2293                 if (partialResult.get(CaptureResult.CONTROL_AE_STATE) != null &&
2294                         partialResult.get(CaptureResult.CONTROL_AF_STATE) != null) {
2295                     time = SystemClock.elapsedRealtime();
2296                 }
2297                 mPartialResultQueue.put(new Pair<CaptureResult, Long>(partialResult, time));
2298             } catch (InterruptedException e) {
2299                 throw new UnsupportedOperationException(
2300                         "Can't handle InterruptedException in onCaptureProgressed");
2301             }
2302         }
2303 
2304         public Pair<CaptureResult, Long> getPartialResultNTime(long timeout) {
2305             try {
2306                 Pair<CaptureResult, Long> result =
2307                         mPartialResultQueue.poll(timeout, TimeUnit.MILLISECONDS);
2308                 return result;
2309             } catch (InterruptedException e) {
2310                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
2311             }
2312         }
2313 
2314         public Pair<CaptureResult, Long> getCaptureResultNTime(long timeout) {
2315             try {
2316                 Pair<CaptureResult, Long> result =
2317                         mResultQueue.poll(timeout, TimeUnit.MILLISECONDS);
2318                 assertNotNull("Wait for a capture result timed out in " + timeout + "ms", result);
2319                 return result;
2320             } catch (InterruptedException e) {
2321                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
2322             }
2323         }
2324 
2325         public Pair<CaptureResult, Long> getPartialResultNTimeForRequest(CaptureRequest myRequest,
2326                 int numResultsWait) {
2327             if (numResultsWait < 0) {
2328                 throw new IllegalArgumentException("numResultsWait must be no less than 0");
2329             }
2330 
2331             Pair<CaptureResult, Long> result;
2332             int i = 0;
2333             do {
2334                 result = getPartialResultNTime(CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
2335                 // The result may be null if no partials are produced on this particular path, so
2336                 // stop trying
2337                 if (result == null) break;
2338                 if (result.first.getRequest().equals(myRequest)) {
2339                     return result;
2340                 }
2341             } while (i++ < numResultsWait);
2342 
2343             // No partials produced - this may not be an error, since a given device may not
2344             // produce any partials on this testing path
2345             return null;
2346         }
2347 
2348         public Pair<CaptureResult, Long> getCaptureResultNTimeForRequest(CaptureRequest myRequest,
2349                 int numResultsWait) {
2350             if (numResultsWait < 0) {
2351                 throw new IllegalArgumentException("numResultsWait must be no less than 0");
2352             }
2353 
2354             Pair<CaptureResult, Long> result;
2355             int i = 0;
2356             do {
2357                 result = getCaptureResultNTime(CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
2358                 if (result.first.getRequest().equals(myRequest)) {
2359                     return result;
2360                 }
2361             } while (i++ < numResultsWait);
2362 
2363             throw new TimeoutRuntimeException("Unable to get the expected capture result after "
2364                     + "waiting for " + numResultsWait + " results");
2365         }
2366 
2367     }
2368 }
2369