• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.cts;
18 
19 import static android.hardware.camera2.cts.CameraTestUtils.REPORT_LOG_NAME;
20 
21 import static org.junit.Assert.assertNotNull;
22 import static org.junit.Assert.assertTrue;
23 
24 import android.app.Instrumentation;
25 import android.content.Context;
26 import android.graphics.ImageFormat;
27 import android.graphics.SurfaceTexture;
28 import android.hardware.HardwareBuffer;
29 import android.hardware.camera2.CameraAccessException;
30 import android.hardware.camera2.CameraCaptureSession;
31 import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
32 import android.hardware.camera2.CameraCharacteristics;
33 import android.hardware.camera2.CameraDevice;
34 import android.hardware.camera2.CameraMetadata;
35 import android.hardware.camera2.CaptureRequest;
36 import android.hardware.camera2.CaptureResult;
37 import android.hardware.camera2.TotalCaptureResult;
38 import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
39 import android.hardware.camera2.cts.CameraTestUtils.SimpleImageReaderListener;
40 import android.hardware.camera2.cts.helpers.StaticMetadata;
41 import android.hardware.camera2.cts.helpers.StaticMetadata.CheckLevel;
42 import android.hardware.camera2.cts.testcases.Camera2AndroidTestRule;
43 import android.hardware.camera2.params.InputConfiguration;
44 import android.hardware.camera2.params.OutputConfiguration;
45 import android.hardware.camera2.params.StreamConfigurationMap;
46 import android.media.Image;
47 import android.media.ImageReader;
48 import android.media.ImageWriter;
49 import android.os.Bundle;
50 import android.os.ConditionVariable;
51 import android.os.SystemClock;
52 import android.util.Log;
53 import android.util.Pair;
54 import android.util.Range;
55 import android.util.Size;
56 import android.view.Surface;
57 
58 import androidx.test.InstrumentationRegistry;
59 import androidx.test.rule.ActivityTestRule;
60 
61 import com.android.compatibility.common.util.DeviceReportLog;
62 import com.android.compatibility.common.util.ResultType;
63 import com.android.compatibility.common.util.ResultUnit;
64 import com.android.compatibility.common.util.Stat;
65 import com.android.ex.camera2.blocking.BlockingSessionCallback;
66 import com.android.ex.camera2.exceptions.TimeoutRuntimeException;
67 
68 import org.junit.Rule;
69 import org.junit.Test;
70 import org.junit.runner.RunWith;
71 import org.junit.runners.JUnit4;
72 
73 import java.util.ArrayList;
74 import java.util.Arrays;
75 import java.util.List;
76 import java.util.ListIterator;
77 import java.util.concurrent.LinkedBlockingQueue;
78 import java.util.concurrent.TimeUnit;
79 
80 /**
81  * Test camera2 API use case performance KPIs, such as camera open time, session creation time,
82  * shutter lag etc. The KPI data will be reported in cts results.
83  */
84 @RunWith(JUnit4.class)
85 public class PerformanceTest {
    private static final String TAG = "PerformanceTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    // Number of iterations each KPI measurement loop runs; per-loop values are reported
    // and averaged.
    private static final int NUM_TEST_LOOPS = 10;
    // Maximum images the test ImageReaders may hold concurrently.
    private static final int NUM_MAX_IMAGES = 4;
    // Number of capture results to wait for when letting preview run between measurements.
    private static final int NUM_RESULTS_WAIT = 30;
    private static final int[] REPROCESS_FORMATS = {ImageFormat.YUV_420_888, ImageFormat.PRIVATE};
    private final int MAX_REPROCESS_IMAGES = 6;
    private final int MAX_JPEG_IMAGES = MAX_REPROCESS_IMAGES;
    private final int MAX_INPUT_IMAGES = MAX_REPROCESS_IMAGES;
    // ZSL queue depth should be bigger than the max simultaneous reprocessing capture request
    // count to maintain reasonable number of candidate image for the worse-case.
    private final int MAX_ZSL_IMAGES = MAX_REPROCESS_IMAGES * 3 / 2;
    private final double REPROCESS_STALL_MARGIN = 0.1;
    // Timeout for waiting on a single capture result, in milliseconds.
    private static final int WAIT_FOR_RESULT_TIMEOUT_MS = 3000;
    private static final int NUM_RESULTS_WAIT_TIMEOUT = 100;
    private static final int NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY = 8;
    // Frame duration of a 30fps stream, in nanoseconds.
    private static final long FRAME_DURATION_NS_30FPS = 33333333L;
    private static final int NUM_ZOOM_STEPS = 10;
    // Instrumentation-argument key; a non-zero byte means the caller already displays
    // an activity (see createActivityRuleIfNeeded()).
    private static final String HAS_ACTIVITY_ARG_KEY = "has-activity";

    // Report log for the KPI values of the currently running test; recreated per stream.
    private DeviceReportLog mReportLog;

    // Used for reading camera output buffers.
    private ImageReader mCameraZslReader;
    private SimpleImageReaderListener mCameraZslImageListener;
    // Used for reprocessing (jpeg) output.
    private ImageReader mJpegReader;
    private SimpleImageReaderListener mJpegListener;
    // Used for reprocessing input.
    private ImageWriter mWriter;
    private SimpleCaptureCallback mZslResultListener;

    // Preview output state shared by the individual tests.
    private Size mPreviewSize;
    private Surface mPreviewSurface;
    private SurfaceTexture mPreviewSurfaceTexture;
    private int mImageReaderFormat;

    private static final Instrumentation mInstrumentation =
            InstrumentationRegistry.getInstrumentation();
    private static final Context mContext = InstrumentationRegistry.getTargetContext();

    // Per-test camera plumbing (camera manager, handlers, device open/close helpers).
    @Rule
    public final Camera2AndroidTestRule mTestRule = new Camera2AndroidTestRule(mContext);

    // b/284352937: Display an activity with SurfaceView so that camera's effect on refresh
    // rate takes precedence.
    //
    // - If no activity is displayed, home screen would vote for a completely different refresh
    // rate. Some examples are 24hz and 144hz. These doesn't reflect the actual refresh rate
    // when camera runs with a SurfaceView.
    // - The testSurfaceViewJitterReduction needs to read timestamps for each output image. If
    // we directly connect camera to SurfaceView, we won't have access to timestamps.
    //
    // So the solution is that if no activity already exists, create an activity with SurfaceView,
    // but not connect it to camera.
    @Rule
    public final ActivityTestRule<Camera2SurfaceViewCtsActivity> mActivityRule =
            createActivityRuleIfNeeded();
144 
createActivityRuleIfNeeded()145     private static ActivityTestRule<Camera2SurfaceViewCtsActivity> createActivityRuleIfNeeded() {
146         Bundle bundle = InstrumentationRegistry.getArguments();
147         byte hasActivity = bundle.getByte(HAS_ACTIVITY_ARG_KEY);
148 
149         // If the caller already has an activity, do not create the ActivityTestRule.
150         if (hasActivity != 0) {
151             return null;
152         } else {
153             return new ActivityTestRule<>(Camera2SurfaceViewCtsActivity.class);
154         }
155     }
156 
157     /**
158      * Test camera launch KPI: the time duration between a camera device is
159      * being opened and first preview frame is available.
160      * <p>
161      * It includes camera open time, session creation time, and sending first
162      * preview request processing latency etc. For the SurfaceView based preview use
163      * case, there is no way for client to know the exact preview frame
164      * arrival time. To approximate this time, a companion YUV420_888 stream is
165      * created. The first YUV420_888 Image coming out of the ImageReader is treated
166      * as the first preview arrival time.</p>
167      * <p>
168      * For depth-only devices, timing is done with the DEPTH16 format instead.
169      * </p>
170      */
171     @Test
testCameraLaunch()172     public void testCameraLaunch() throws Exception {
173         double[] avgCameraLaunchTimes = new double[mTestRule.getCameraIdsUnderTest().length];
174 
175         int counter = 0;
176         for (String id : mTestRule.getCameraIdsUnderTest()) {
177             // Do NOT move these variables to outer scope
178             // They will be passed to DeviceReportLog and their references will be stored
179             String streamName = "test_camera_launch";
180             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
181             mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
182             double[] cameraOpenTimes = new double[NUM_TEST_LOOPS];
183             double[] configureStreamTimes = new double[NUM_TEST_LOOPS];
184             double[] startPreviewTimes = new double[NUM_TEST_LOOPS];
185             double[] stopPreviewTimes = new double[NUM_TEST_LOOPS];
186             double[] cameraCloseTimes = new double[NUM_TEST_LOOPS];
187             double[] cameraLaunchTimes = new double[NUM_TEST_LOOPS];
188             try {
189                 CameraCharacteristics ch =
190                         mTestRule.getCameraManager().getCameraCharacteristics(id);
191                 mTestRule.setStaticInfo(new StaticMetadata(ch));
192                 boolean isColorOutputSupported = mTestRule.getStaticInfo().isColorOutputSupported();
193                 if (isColorOutputSupported) {
194                     initializeImageReader(id, ImageFormat.YUV_420_888);
195                 } else {
196                     assertTrue("Depth output must be supported if regular output isn't!",
197                             mTestRule.getStaticInfo().isDepthOutputSupported());
198                     initializeImageReader(id, ImageFormat.DEPTH16);
199                 }
200                 updatePreviewSurface(mPreviewSize);
201 
202                 SimpleImageListener imageListener = null;
203                 long startTimeMs, openTimeMs, configureTimeMs, previewStartedTimeMs;
204                 for (int i = 0; i < NUM_TEST_LOOPS; i++) {
205                     try {
206                         // Need create a new listener every iteration to be able to wait
207                         // for the first image comes out.
208                         imageListener = new SimpleImageListener();
209                         mTestRule.getReader().setOnImageAvailableListener(
210                                 imageListener, mTestRule.getHandler());
211                         startTimeMs = SystemClock.elapsedRealtime();
212 
213                         // Blocking open camera
214                         simpleOpenCamera(id);
215                         openTimeMs = SystemClock.elapsedRealtime();
216                         cameraOpenTimes[i] = openTimeMs - startTimeMs;
217 
218                         // Blocking configure outputs.
219                         CaptureRequest previewRequest =
220                                 configureReaderAndPreviewOutputs(id, isColorOutputSupported);
221                         configureTimeMs = SystemClock.elapsedRealtime();
222                         configureStreamTimes[i] = configureTimeMs - openTimeMs;
223 
224                         // Blocking start preview (start preview to first image arrives)
225                         SimpleCaptureCallback resultListener =
226                                 new SimpleCaptureCallback();
227                         blockingStartPreview(id, resultListener, previewRequest, imageListener);
228                         previewStartedTimeMs = SystemClock.elapsedRealtime();
229                         startPreviewTimes[i] = previewStartedTimeMs - configureTimeMs;
230                         cameraLaunchTimes[i] = previewStartedTimeMs - startTimeMs;
231 
232                         // Let preview on for a couple of frames
233                         CameraTestUtils.waitForNumResults(resultListener, NUM_RESULTS_WAIT,
234                                 WAIT_FOR_RESULT_TIMEOUT_MS);
235 
236                         // Blocking stop preview
237                         startTimeMs = SystemClock.elapsedRealtime();
238                         blockingStopRepeating();
239                         stopPreviewTimes[i] = SystemClock.elapsedRealtime() - startTimeMs;
240                     }
241                     finally {
242                         // Blocking camera close
243                         startTimeMs = SystemClock.elapsedRealtime();
244                         mTestRule.closeDevice(id);
245                         cameraCloseTimes[i] = SystemClock.elapsedRealtime() - startTimeMs;
246                     }
247                 }
248 
249                 avgCameraLaunchTimes[counter] = Stat.getAverage(cameraLaunchTimes);
250                 // Finish the data collection, report the KPIs.
251                 // ReportLog keys have to be lowercase underscored format.
252                 mReportLog.addValues("camera_open_time", cameraOpenTimes, ResultType.LOWER_BETTER,
253                         ResultUnit.MS);
254                 mReportLog.addValues("camera_configure_stream_time", configureStreamTimes,
255                         ResultType.LOWER_BETTER, ResultUnit.MS);
256                 mReportLog.addValues("camera_start_preview_time", startPreviewTimes,
257                         ResultType.LOWER_BETTER, ResultUnit.MS);
258                 mReportLog.addValues("camera_camera_stop_preview", stopPreviewTimes,
259                         ResultType.LOWER_BETTER, ResultUnit.MS);
260                 mReportLog.addValues("camera_camera_close_time", cameraCloseTimes,
261                         ResultType.LOWER_BETTER, ResultUnit.MS);
262                 mReportLog.addValues("camera_launch_time", cameraLaunchTimes,
263                         ResultType.LOWER_BETTER, ResultUnit.MS);
264             }
265             finally {
266                 mTestRule.closeDefaultImageReader();
267                 closePreviewSurface();
268             }
269             counter++;
270             mReportLog.submit(mInstrumentation);
271 
272             if (VERBOSE) {
273                 Log.v(TAG, "Camera " + id + " device open times(ms): "
274                         + Arrays.toString(cameraOpenTimes)
275                         + ". Average(ms): " + Stat.getAverage(cameraOpenTimes)
276                         + ". Min(ms): " + Stat.getMin(cameraOpenTimes)
277                         + ". Max(ms): " + Stat.getMax(cameraOpenTimes));
278                 Log.v(TAG, "Camera " + id + " configure stream times(ms): "
279                         + Arrays.toString(configureStreamTimes)
280                         + ". Average(ms): " + Stat.getAverage(configureStreamTimes)
281                         + ". Min(ms): " + Stat.getMin(configureStreamTimes)
282                         + ". Max(ms): " + Stat.getMax(configureStreamTimes));
283                 Log.v(TAG, "Camera " + id + " start preview times(ms): "
284                         + Arrays.toString(startPreviewTimes)
285                         + ". Average(ms): " + Stat.getAverage(startPreviewTimes)
286                         + ". Min(ms): " + Stat.getMin(startPreviewTimes)
287                         + ". Max(ms): " + Stat.getMax(startPreviewTimes));
288                 Log.v(TAG, "Camera " + id + " stop preview times(ms): "
289                         + Arrays.toString(stopPreviewTimes)
290                         + ". Average(ms): " + Stat.getAverage(stopPreviewTimes)
291                         + ". nMin(ms): " + Stat.getMin(stopPreviewTimes)
292                         + ". nMax(ms): " + Stat.getMax(stopPreviewTimes));
293                 Log.v(TAG, "Camera " + id + " device close times(ms): "
294                         + Arrays.toString(cameraCloseTimes)
295                         + ". Average(ms): " + Stat.getAverage(cameraCloseTimes)
296                         + ". Min(ms): " + Stat.getMin(cameraCloseTimes)
297                         + ". Max(ms): " + Stat.getMax(cameraCloseTimes));
298                 Log.v(TAG, "Camera " + id + " camera launch times(ms): "
299                         + Arrays.toString(cameraLaunchTimes)
300                         + ". Average(ms): " + Stat.getAverage(cameraLaunchTimes)
301                         + ". Min(ms): " + Stat.getMin(cameraLaunchTimes)
302                         + ". Max(ms): " + Stat.getMax(cameraLaunchTimes));
303             }
304         }
305         if (mTestRule.getCameraIdsUnderTest().length != 0) {
306             String streamName = "test_camera_launch_average";
307             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
308             mReportLog.setSummary("camera_launch_average_time_for_all_cameras",
309                     Stat.getAverage(avgCameraLaunchTimes), ResultType.LOWER_BETTER, ResultUnit.MS);
310             mReportLog.submit(mInstrumentation);
311         }
312     }
313 
314     /**
315      * Test camera capture KPI for YUV_420_888, PRIVATE, JPEG, RAW and RAW+JPEG
316      * formats: the time duration between sending out a single image capture request
317      * and receiving image data and capture result.
318      * <p>
319      * It enumerates the following metrics: capture latency, computed by
320      * measuring the time between sending out the capture request and getting
321      * the image data; partial result latency, computed by measuring the time
322      * between sending out the capture request and getting the partial result;
323      * capture result latency, computed by measuring the time between sending
324      * out the capture request and getting the full capture result.
325      * </p>
326      */
327     @Test
testSingleCapture()328     public void testSingleCapture() throws Exception {
329         int[] JPEG_FORMAT = {ImageFormat.JPEG};
330         testSingleCaptureForFormat(JPEG_FORMAT, "jpeg", /*addPreviewDelay*/ true);
331         if (!mTestRule.isPerfMeasure()) {
332             int[] JPEG_R_FORMAT = {ImageFormat.JPEG_R};
333             testSingleCaptureForFormat(JPEG_R_FORMAT, "jpeg_r", /*addPreviewDelay*/ true,
334                     /*enablePostview*/ false);
335             int[] YUV_FORMAT = {ImageFormat.YUV_420_888};
336             testSingleCaptureForFormat(YUV_FORMAT, null, /*addPreviewDelay*/ true);
337             int[] PRIVATE_FORMAT = {ImageFormat.PRIVATE};
338             testSingleCaptureForFormat(PRIVATE_FORMAT, "private", /*addPreviewDelay*/ true);
339             int[] RAW_FORMAT = {ImageFormat.RAW_SENSOR};
340             testSingleCaptureForFormat(RAW_FORMAT, "raw", /*addPreviewDelay*/ true);
341             int[] RAW_JPEG_FORMATS = {ImageFormat.RAW_SENSOR, ImageFormat.JPEG};
342             testSingleCaptureForFormat(RAW_JPEG_FORMATS, "raw_jpeg", /*addPreviewDelay*/ true);
343         }
344     }
345 
appendFormatDescription(String message, String formatDescription)346     private String appendFormatDescription(String message, String formatDescription) {
347         if (message == null) {
348             return null;
349         }
350 
351         String ret = message;
352         if (formatDescription != null) {
353             ret = String.format(ret + "_%s", formatDescription);
354         }
355 
356         return ret;
357     }
358 
testSingleCaptureForFormat(int[] formats, String formatDescription, boolean addPreviewDelay)359     private void testSingleCaptureForFormat(int[] formats, String formatDescription,
360             boolean addPreviewDelay) throws Exception {
361        testSingleCaptureForFormat(formats, formatDescription, addPreviewDelay,
362                /*enablePostview*/ true);
363     }
364 
    /**
     * Measure still-capture KPIs for the given set of simultaneous output formats on every
     * camera under test: capture (buffer) latency, partial-result latency, and full
     * capture-result latency, all relative to request submission. Values are reported per
     * camera via {@link DeviceReportLog}, plus a cross-camera average summary.
     *
     * @param formats output formats captured simultaneously by a single still request.
     * @param formatDescription suffix for report keys/stream names; may be null.
     * @param addPreviewDelay if true, sleep 500ms after preview start so 3A can settle.
     * @param enablePostview whether a postview output is configured for the still capture.
     */
    private void testSingleCaptureForFormat(int[] formats, String formatDescription,
            boolean addPreviewDelay, boolean enablePostview) throws Exception {
        double[] avgResultTimes = new double[mTestRule.getCameraIdsUnderTest().length];
        double[] avgCaptureTimes = new double[mTestRule.getCameraIdsUnderTest().length];

        int counter = 0;
        for (String id : mTestRule.getCameraIdsUnderTest()) {
            // Do NOT move these variables to outer scope
            // They will be passed to DeviceReportLog and their references will be stored
            String streamName = appendFormatDescription("test_single_capture", formatDescription);
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
            double[] captureTimes = new double[NUM_TEST_LOOPS];
            double[] getPartialTimes = new double[NUM_TEST_LOOPS];
            double[] getResultTimes = new double[NUM_TEST_LOOPS];
            ImageReader[] readers = null;
            try {
                // Skip cameras without color output (e.g. depth-only devices).
                if (!mTestRule.getAllStaticInfo().get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }

                // Skip cameras that cannot stream every requested format.
                StreamConfigurationMap configMap = mTestRule.getAllStaticInfo().get(
                        id).getCharacteristics().get(
                        CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                boolean formatsSupported = true;
                for (int format : formats) {
                    if (!configMap.isOutputSupportedFor(format)) {
                        Log.i(TAG, "Camera " + id + " does not support output format: " + format +
                                " skipping");
                        formatsSupported = false;
                        break;
                    }
                }
                if (!formatsSupported) {
                    continue;
                }

                mTestRule.openDevice(id);

                // Partial-result timing is only meaningful when the device reports more than
                // one partial result per capture.
                boolean partialsExpected = mTestRule.getStaticInfo().getPartialResultCount() > 1;
                long startTimeMs;
                boolean isPartialTimingValid = partialsExpected;
                for (int i = 0; i < NUM_TEST_LOOPS; i++) {

                    // setup builders and listeners
                    CaptureRequest.Builder previewBuilder =
                            mTestRule.getCamera().createCaptureRequest(
                                    CameraDevice.TEMPLATE_PREVIEW);
                    CaptureRequest.Builder captureBuilder =
                            mTestRule.getCamera().createCaptureRequest(
                                    CameraDevice.TEMPLATE_STILL_CAPTURE);
                    SimpleCaptureCallback previewResultListener =
                            new SimpleCaptureCallback();
                    SimpleTimingResultListener captureResultListener =
                            new SimpleTimingResultListener();
                    SimpleImageListener[] imageListeners = new SimpleImageListener[formats.length];
                    Size[] imageSizes = new Size[formats.length];
                    for (int j = 0; j < formats.length; j++) {
                        // Performance-class runs bound the output to 1080p; otherwise the
                        // largest size for the format is used (index 0 of the sorted list).
                        Size sizeBound = mTestRule.isPerfClassTest() ? new Size(1920, 1080) : null;
                        imageSizes[j] = CameraTestUtils.getSortedSizesForFormat(
                                id,
                                mTestRule.getCameraManager(),
                                formats[j],
                                sizeBound).get(0);
                        imageListeners[j] = new SimpleImageListener();
                    }

                    readers = prepareStillCaptureAndStartPreview(id, previewBuilder, captureBuilder,
                            mTestRule.getOrderedPreviewSizes().get(0), imageSizes, formats,
                            previewResultListener, NUM_MAX_IMAGES, imageListeners, enablePostview);

                    if (addPreviewDelay) {
                        Thread.sleep(500);
                    }

                    // Capture an image and get image data
                    startTimeMs = SystemClock.elapsedRealtime();
                    CaptureRequest request = captureBuilder.build();
                    mTestRule.getCameraSession().capture(
                            request, captureResultListener, mTestRule.getHandler());

                    Pair<CaptureResult, Long> partialResultNTime = null;
                    if (partialsExpected) {
                        partialResultNTime = captureResultListener.getPartialResultNTimeForRequest(
                                request, NUM_RESULTS_WAIT);
                        // Even if maxPartials > 1, may not see partials for some devices
                        if (partialResultNTime == null) {
                            partialsExpected = false;
                            isPartialTimingValid = false;
                        }
                    }
                    // NOTE(review): captureResultNTime is dereferenced below without a null
                    // check; presumably the listener throws or always delivers within
                    // NUM_RESULTS_WAIT -- confirm against SimpleTimingResultListener.
                    Pair<CaptureResult, Long> captureResultNTime =
                            captureResultListener.getCaptureResultNTimeForRequest(
                                    request, NUM_RESULTS_WAIT);

                    // Capture latency = request submission to (averaged) buffer arrival
                    // across all requested formats.
                    double [] imageTimes = new double[formats.length];
                    for (int j = 0; j < formats.length; j++) {
                        imageListeners[j].waitForImageAvailable(
                                CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                        imageTimes[j] = imageListeners[j].getTimeReceivedImage();
                    }

                    captureTimes[i] = Stat.getAverage(imageTimes) - startTimeMs;
                    if (partialsExpected) {
                        getPartialTimes[i] = partialResultNTime.second - startTimeMs;
                        if (getPartialTimes[i] < 0) {
                            isPartialTimingValid = false;
                        }
                    }
                    getResultTimes[i] = captureResultNTime.second - startTimeMs;

                    // simulate real scenario (preview runs a bit)
                    CameraTestUtils.waitForNumResults(previewResultListener, NUM_RESULTS_WAIT,
                            WAIT_FOR_RESULT_TIMEOUT_MS);

                    blockingStopRepeating();

                    CameraTestUtils.closeImageReaders(readers);
                    readers = null;
                }
                String message = appendFormatDescription("camera_capture_latency",
                        formatDescription);
                mReportLog.addValues(message, captureTimes, ResultType.LOWER_BETTER, ResultUnit.MS);
                // If any of the partial results do not contain AE and AF state, then no report
                if (isPartialTimingValid) {
                    message = appendFormatDescription("camera_partial_result_latency",
                            formatDescription);
                    mReportLog.addValues(message, getPartialTimes, ResultType.LOWER_BETTER,
                            ResultUnit.MS);
                }
                message = appendFormatDescription("camera_capture_result_latency",
                        formatDescription);
                mReportLog.addValues(message, getResultTimes, ResultType.LOWER_BETTER,
                        ResultUnit.MS);

                avgResultTimes[counter] = Stat.getAverage(getResultTimes);
                avgCaptureTimes[counter] = Stat.getAverage(captureTimes);
            }
            finally {
                // Readers are normally closed per-loop; this covers early exits.
                CameraTestUtils.closeImageReaders(readers);
                readers = null;
                mTestRule.closeDevice(id);
                closePreviewSurface();
            }
            counter++;
            mReportLog.submit(mInstrumentation);
        }

        // Result will not be reported in CTS report if no summary is printed.
        if (mTestRule.getCameraIdsUnderTest().length != 0) {
            String streamName = appendFormatDescription("test_single_capture_average",
                    formatDescription);
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            // In performance measurement mode, capture the buffer latency rather than result
            // latency.
            if (mTestRule.isPerfMeasure()) {
                String message = appendFormatDescription(
                        "camera_capture_average_latency_for_all_cameras", formatDescription);
                mReportLog.setSummary(message, Stat.getAverage(avgCaptureTimes),
                        ResultType.LOWER_BETTER, ResultUnit.MS);
            } else {
                String message = appendFormatDescription(
                        "camera_capture_result_average_latency_for_all_cameras", formatDescription);
                mReportLog.setSummary(message, Stat.getAverage(avgResultTimes),
                        ResultType.LOWER_BETTER, ResultUnit.MS);
            }
            mReportLog.submit(mInstrumentation);
        }
    }
535 
536     /**
537      * Test multiple capture KPI for YUV_420_888 format: the average time duration
538      * between sending out image capture requests and receiving capture results.
539      * <p>
540      * It measures capture latency, which is the time between sending out the capture
541      * request and getting the full capture result, and the frame duration, which is the timestamp
542      * gap between results.
543      * </p>
544      */
    @Test
    public void testMultipleCapture() throws Exception {
        // Per-camera averages, indexed by `counter` below.
        // NOTE(review): cameras skipped for lacking color output do not advance `counter`,
        // so trailing entries stay 0 yet are still included in the final summary average.
        double[] avgResultTimes = new double[mTestRule.getCameraIdsUnderTest().length];
        double[] avgDurationMs = new double[mTestRule.getCameraIdsUnderTest().length];

        // A simple CaptureSession StateCallback to handle onCaptureQueueEmpty
        class MultipleCaptureStateCallback extends CameraCaptureSession.StateCallback {
            // Signaled whenever an onCaptureQueueEmpty callback arrives.
            private ConditionVariable captureQueueEmptyCond = new ConditionVariable();
            // Number of onCaptureQueueEmpty callbacks not yet consumed by a waiter.
            private int captureQueueEmptied = 0;

            @Override
            public void onConfigured(CameraCaptureSession session) {
                // Empty implementation
            }

            @Override
            public void onConfigureFailed(CameraCaptureSession session) {
                // Empty implementation
            }

            @Override
            public void onCaptureQueueEmpty(CameraCaptureSession session) {
                captureQueueEmptied++;
                if (VERBOSE) {
                    Log.v(TAG, "onCaptureQueueEmpty received. captureQueueEmptied = "
                            + captureQueueEmptied);
                }

                captureQueueEmptyCond.open();
            }

            /* Wait for onCaptureQueueEmpty, return immediately if an onCaptureQueueEmpty was
             * already received, otherwise, wait for one to arrive. */
            public void waitForCaptureQueueEmpty(long timeout) {
                if (captureQueueEmptied > 0) {
                    captureQueueEmptied--;
                    return;
                }

                if (captureQueueEmptyCond.block(timeout)) {
                    // Consume the signal; any callbacks that raced in while blocked are
                    // treated as consumed by this single wait (count reset to 0).
                    captureQueueEmptyCond.close();
                    captureQueueEmptied = 0;
                } else {
                    throw new TimeoutRuntimeException("Unable to receive onCaptureQueueEmpty after "
                            + timeout + "ms");
                }
            }
        }

        final MultipleCaptureStateCallback sessionListener = new MultipleCaptureStateCallback();

        int counter = 0;
        for (String id : mTestRule.getCameraIdsUnderTest()) {
            // Do NOT move these variables to outer scope
            // They will be passed to DeviceReportLog and their references will be stored
            String streamName = "test_multiple_capture";
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
            // startTimes[j]: submission time of capture j within one loop iteration.
            // getResultTimes[j] / frameDurationMs[j-1]: accumulated averages — each of the
            // NUM_TEST_LOOPS iterations contributes value/NUM_TEST_LOOPS.
            long[] startTimes = new long[NUM_MAX_IMAGES];
            double[] getResultTimes = new double[NUM_MAX_IMAGES];
            double[] frameDurationMs = new double[NUM_MAX_IMAGES-1];
            try {
                StaticMetadata staticMetadata = mTestRule.getAllStaticInfo().get(id);
                if (!staticMetadata.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }
                boolean useSessionKeys = isFpsRangeASessionKey(staticMetadata.getCharacteristics());

                mTestRule.openDevice(id);
                for (int i = 0; i < NUM_TEST_LOOPS; i++) {

                    // setup builders and listeners
                    CaptureRequest.Builder previewBuilder =
                            mTestRule.getCamera().createCaptureRequest(
                                    CameraDevice.TEMPLATE_PREVIEW);
                    CaptureRequest.Builder captureBuilder =
                            mTestRule.getCamera().createCaptureRequest(
                                    CameraDevice.TEMPLATE_STILL_CAPTURE);
                    SimpleCaptureCallback previewResultListener =
                            new SimpleCaptureCallback();
                    SimpleTimingResultListener captureResultListener =
                            new SimpleTimingResultListener();
                    SimpleImageReaderListener imageListener =
                            new SimpleImageReaderListener(/*asyncMode*/true, NUM_MAX_IMAGES);

                    // Largest available YUV_420_888 size is used for the still stream.
                    Size maxYuvSize = CameraTestUtils.getSortedSizesForFormat(
                            id, mTestRule.getCameraManager(),
                            ImageFormat.YUV_420_888, /*bound*/null).get(0);
                    // Find minimum frame duration for YUV_420_888
                    StreamConfigurationMap config =
                            mTestRule.getStaticInfo().getCharacteristics().get(
                            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

                    final long minStillFrameDuration =
                            config.getOutputMinFrameDuration(ImageFormat.YUV_420_888, maxYuvSize);
                    if (minStillFrameDuration > 0) {
                        // Pin the AE FPS range so measured frame durations are comparable.
                        Range<Integer> targetRange =
                                CameraTestUtils.getSuitableFpsRangeForDuration(id,
                                        minStillFrameDuration, mTestRule.getStaticInfo());
                        previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
                        captureBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
                    }

                    prepareCaptureAndStartPreview(previewBuilder, captureBuilder,
                            mTestRule.getOrderedPreviewSizes().get(0), maxYuvSize,
                            ImageFormat.YUV_420_888, previewResultListener,
                            sessionListener, NUM_MAX_IMAGES, imageListener,
                            useSessionKeys);

                    // Converge AE
                    CameraTestUtils.waitForAeStable(previewResultListener,
                            NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY, mTestRule.getStaticInfo(),
                            WAIT_FOR_RESULT_TIMEOUT_MS, NUM_RESULTS_WAIT_TIMEOUT);

                    if (mTestRule.getStaticInfo().isAeLockSupported()) {
                        // Lock AE if possible to improve stability
                        previewBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
                        mTestRule.getCameraSession().setRepeatingRequest(previewBuilder.build(),
                                previewResultListener, mTestRule.getHandler());
                        if (mTestRule.getStaticInfo().isHardwareLevelAtLeastLimited()) {
                            // Legacy mode doesn't output AE state
                            CameraTestUtils.waitForResultValue(previewResultListener,
                                    CaptureResult.CONTROL_AE_STATE,
                                    CaptureResult.CONTROL_AE_STATE_LOCKED,
                                    NUM_RESULTS_WAIT_TIMEOUT, WAIT_FOR_RESULT_TIMEOUT_MS);
                        }
                    }

                    // Capture NUM_MAX_IMAGES images based on onCaptureQueueEmpty callback
                    for (int j = 0; j < NUM_MAX_IMAGES; j++) {

                        // Capture an image and get image data
                        startTimes[j] = SystemClock.elapsedRealtime();
                        CaptureRequest request = captureBuilder.build();
                        mTestRule.getCameraSession().capture(
                                request, captureResultListener, mTestRule.getHandler());

                        // Wait for capture queue empty for the current request
                        sessionListener.waitForCaptureQueueEmpty(
                                CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                    }

                    // Acquire the capture result time and frame duration
                    long prevTimestamp = -1;
                    for (int j = 0; j < NUM_MAX_IMAGES; j++) {
                        Pair<CaptureResult, Long> captureResultNTime =
                                captureResultListener.getCaptureResultNTime(
                                        CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);

                        // Latency: time from request submission to full result arrival,
                        // averaged over NUM_TEST_LOOPS iterations.
                        getResultTimes[j] +=
                                (double)(captureResultNTime.second - startTimes[j])/NUM_TEST_LOOPS;

                        // Collect inter-frame timestamp
                        long timestamp = captureResultNTime.first.get(
                                CaptureResult.SENSOR_TIMESTAMP);
                        if (prevTimestamp != -1) {
                            // Timestamp gap is in ns; 1000000.0 converts to ms while averaging.
                            frameDurationMs[j-1] +=
                                    (double)(timestamp - prevTimestamp)/(
                                            NUM_TEST_LOOPS * 1000000.0);
                        }
                        prevTimestamp = timestamp;
                    }

                    // simulate real scenario (preview runs a bit)
                    CameraTestUtils.waitForNumResults(previewResultListener, NUM_RESULTS_WAIT,
                            WAIT_FOR_RESULT_TIMEOUT_MS);

                    stopRepeating();
                }

                for (int i = 0; i < getResultTimes.length; i++) {
                    Log.v(TAG, "Camera " + id + " result time[" + i + "] is " +
                            getResultTimes[i] + " ms");
                }
                for (int i = 0; i < NUM_MAX_IMAGES-1; i++) {
                    Log.v(TAG, "Camera " + id + " frame duration time[" + i + "] is " +
                            frameDurationMs[i] + " ms");
                }

                mReportLog.addValues("camera_multiple_capture_result_latency", getResultTimes,
                        ResultType.LOWER_BETTER, ResultUnit.MS);
                mReportLog.addValues("camera_multiple_capture_frame_duration", frameDurationMs,
                        ResultType.LOWER_BETTER, ResultUnit.MS);


                avgResultTimes[counter] = Stat.getAverage(getResultTimes);
                avgDurationMs[counter] = Stat.getAverage(frameDurationMs);
            }
            finally {
                mTestRule.closeDefaultImageReader();
                mTestRule.closeDevice(id);
                closePreviewSurface();
            }
            counter++;
            mReportLog.submit(mInstrumentation);
        }

        // Result will not be reported in CTS report if no summary is printed.
        if (mTestRule.getCameraIdsUnderTest().length != 0) {
            String streamName = "test_multiple_capture_average";
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.setSummary("camera_multiple_capture_result_average_latency_for_all_cameras",
                    Stat.getAverage(avgResultTimes), ResultType.LOWER_BETTER, ResultUnit.MS);
            mReportLog.submit(mInstrumentation);
            mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
            mReportLog.setSummary("camera_multiple_capture_frame_duration_average_for_all_cameras",
                    Stat.getAverage(avgDurationMs), ResultType.LOWER_BETTER, ResultUnit.MS);
            mReportLog.submit(mInstrumentation);
        }
    }
756 
757     /**
758      * Test reprocessing shot-to-shot latency with default NR and edge options, i.e., from the time
759      * a reprocess request is issued to the time the reprocess image is returned.
760      */
761     @Test
testReprocessingLatency()762     public void testReprocessingLatency() throws Exception {
763         for (String id : mTestRule.getCameraIdsUnderTest()) {
764             for (int format : REPROCESS_FORMATS) {
765                 if (!isReprocessSupported(id, format)) {
766                     continue;
767                 }
768 
769                 try {
770                     mTestRule.openDevice(id);
771                     String streamName = "test_reprocessing_latency";
772                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
773                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
774                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
775                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/false,
776                             /*highQuality*/false);
777                 } finally {
778                     closeReaderWriters();
779                     mTestRule.closeDevice(id);
780                     closePreviewSurface();
781                     mReportLog.submit(mInstrumentation);
782                 }
783             }
784         }
785     }
786 
787     /**
788      * Test reprocessing throughput with default NR and edge options,
789      * i.e., how many frames can be reprocessed during a given amount of time.
790      *
791      */
792     @Test
testReprocessingThroughput()793     public void testReprocessingThroughput() throws Exception {
794         for (String id : mTestRule.getCameraIdsUnderTest()) {
795             for (int format : REPROCESS_FORMATS) {
796                 if (!isReprocessSupported(id, format)) {
797                     continue;
798                 }
799 
800                 try {
801                     mTestRule.openDevice(id);
802                     String streamName = "test_reprocessing_throughput";
803                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
804                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
805                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
806                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/true,
807                             /*highQuality*/false);
808                 } finally {
809                     closeReaderWriters();
810                     mTestRule.closeDevice(id);
811                     closePreviewSurface();
812                     mReportLog.submit(mInstrumentation);
813                 }
814             }
815         }
816     }
817 
818     /**
819      * Test reprocessing shot-to-shot latency with High Quality NR and edge options, i.e., from the
820      * time a reprocess request is issued to the time the reprocess image is returned.
821      */
822     @Test
testHighQualityReprocessingLatency()823     public void testHighQualityReprocessingLatency() throws Exception {
824         for (String id : mTestRule.getCameraIdsUnderTest()) {
825             for (int format : REPROCESS_FORMATS) {
826                 if (!isReprocessSupported(id, format)) {
827                     continue;
828                 }
829 
830                 try {
831                     mTestRule.openDevice(id);
832                     String streamName = "test_high_quality_reprocessing_latency";
833                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
834                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
835                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
836                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/false,
837                             /*requireHighQuality*/true);
838                 } finally {
839                     closeReaderWriters();
840                     mTestRule.closeDevice(id);
841                     closePreviewSurface();
842                     mReportLog.submit(mInstrumentation);
843                 }
844             }
845         }
846     }
847 
848     /**
849      * Test reprocessing throughput with high quality NR and edge options, i.e., how many frames can
850      * be reprocessed during a given amount of time.
851      *
852      */
853     @Test
testHighQualityReprocessingThroughput()854     public void testHighQualityReprocessingThroughput() throws Exception {
855         for (String id : mTestRule.getCameraIdsUnderTest()) {
856             for (int format : REPROCESS_FORMATS) {
857                 if (!isReprocessSupported(id, format)) {
858                     continue;
859                 }
860 
861                 try {
862                     mTestRule.openDevice(id);
863                     String streamName = "test_high_quality_reprocessing_throughput";
864                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
865                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
866                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
867                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/true,
868                             /*requireHighQuality*/true);
869                 } finally {
870                     closeReaderWriters();
871                     mTestRule.closeDevice(id);
872                     closePreviewSurface();
873                     mReportLog.submit(mInstrumentation);
874                 }
875             }
876         }
877     }
878 
879     /**
880      * Testing reprocessing caused preview stall (frame drops)
881      */
882     @Test
testReprocessingCaptureStall()883     public void testReprocessingCaptureStall() throws Exception {
884         for (String id : mTestRule.getCameraIdsUnderTest()) {
885             for (int format : REPROCESS_FORMATS) {
886                 if (!isReprocessSupported(id, format)) {
887                     continue;
888                 }
889 
890                 try {
891                     mTestRule.openDevice(id);
892                     String streamName = "test_reprocessing_capture_stall";
893                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
894                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
895                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
896                     reprocessingCaptureStallTestByCamera(format);
897                 } finally {
898                     closeReaderWriters();
899                     mTestRule.closeDevice(id);
900                     closePreviewSurface();
901                     mReportLog.submit(mInstrumentation);
902                 }
903             }
904         }
905     }
906 
    // Direction of zoom: in or out
    private enum ZoomDirection {
        ZOOM_IN,   // zoom ratio increases over the test steps
        ZOOM_OUT;  // zoom ratio decreases over the test steps
    }
912 
    // Range of zoom: >= 1.0x, <= 1.0x, or full range.
    private enum ZoomRange {
        RATIO_1_OR_LARGER,   // [1.0x, maxZoomRatio]
        RATIO_1_OR_SMALLER,  // [minZoomRatio, 1.0x] (requires ultrawide, minZoomRatio < 1.0x)
        RATIO_FULL_RANGE;    // [minZoomRatio, maxZoomRatio]
    }
919 
920     /**
921      * Testing Zoom settings override performance for zoom in from 1.0x
922      *
923      * The range of zoomRatio being tested is [1.0x, maxZoomRatio]
924      */
925     @Test
testZoomSettingsOverrideLatencyInFrom1x()926     public void testZoomSettingsOverrideLatencyInFrom1x() throws Exception {
927         testZoomSettingsOverrideLatency("zoom_in_from_1x",
928                 ZoomDirection.ZOOM_IN, ZoomRange.RATIO_1_OR_LARGER);
929     }
930 
931     /**
932      * Testing Zoom settings override performance for zoom out to 1.0x
933      *
934      * The range of zoomRatio being tested is [maxZoomRatio, 1.0x]
935      */
936     @Test
testZoomSettingsOverrideLatencyOutTo1x()937     public void testZoomSettingsOverrideLatencyOutTo1x() throws Exception {
938         testZoomSettingsOverrideLatency("zoom_out_to_1x",
939                 ZoomDirection.ZOOM_OUT, ZoomRange.RATIO_1_OR_LARGER);
940     }
941 
942     /**
943      * Testing Zoom settings override performance for zoom out from 1.0x
944      *
945      * The range of zoomRatios being tested is [1.0x, minZoomRatio].
946      * The test is skipped if minZoomRatio == 1.0x.
947      */
948     @Test
testZoomSettingsOverrideLatencyOutFrom1x()949     public void testZoomSettingsOverrideLatencyOutFrom1x() throws Exception {
950         testZoomSettingsOverrideLatency("zoom_out_from_1x",
951                 ZoomDirection.ZOOM_OUT, ZoomRange.RATIO_1_OR_SMALLER);
952     }
953 
954     /**
955      * Testing Zoom settings override performance for zoom in on a camera with ultrawide lens
956      *
957      * The range of zoomRatios being tested is [minZoomRatio, maxZoomRatio].
958      * The test is skipped if minZoomRatio == 1.0x.
959      */
960     @Test
testZoomSettingsOverrideLatencyInWithUltraWide()961     public void testZoomSettingsOverrideLatencyInWithUltraWide() throws Exception {
962         testZoomSettingsOverrideLatency("zoom_in_from_ultrawide",
963                 ZoomDirection.ZOOM_IN, ZoomRange.RATIO_FULL_RANGE);
964     }
965 
966     /**
967      * Testing Zoom settings override performance for zoom out on a camera with ultrawide lens
968      *
969      * The range of zoomRatios being tested is [maxZoomRatio, minZoomRatio].
970      * The test is skipped if minZoomRatio == 1.0x.
971      */
972     @Test
testZoomSettingsOverrideLatencyOutWithUltraWide()973     public void testZoomSettingsOverrideLatencyOutWithUltraWide() throws Exception {
974         testZoomSettingsOverrideLatency("zoom_out_to_ultrawide",
975                 ZoomDirection.ZOOM_OUT, ZoomRange.RATIO_FULL_RANGE);
976     }
977 
978     /**
979      * This test measures the zoom latency improvement for devices supporting zoom settings
980      * override.
981      */
testZoomSettingsOverrideLatency(String testCase, ZoomDirection direction, ZoomRange range)982     private void testZoomSettingsOverrideLatency(String testCase,
983             ZoomDirection direction, ZoomRange range) throws Exception {
984         final int ZOOM_STEPS = 5;
985         final float ZOOM_ERROR_MARGIN = 0.05f;
986         final int ZOOM_IN_MIN_IMPROVEMENT_IN_FRAMES = 1;
987         for (String id : mTestRule.getCameraIdsUnderTest()) {
988             StaticMetadata staticMetadata = mTestRule.getAllStaticInfo().get(id);
989             CameraCharacteristics ch = staticMetadata.getCharacteristics();
990 
991             if (!staticMetadata.isColorOutputSupported()) {
992                 continue;
993             }
994 
995             if (!staticMetadata.isZoomSettingsOverrideSupported()) {
996                 continue;
997             }
998 
999             // Figure out start and end zoom ratio
1000             Range<Float> zoomRatioRange = staticMetadata.getZoomRatioRangeChecked();
1001             float startRatio = zoomRatioRange.getLower();
1002             float endRatio = zoomRatioRange.getUpper();
1003             if (startRatio >= 1.0f && (range == ZoomRange.RATIO_FULL_RANGE
1004                     || range == ZoomRange.RATIO_1_OR_SMALLER)) {
1005                 continue;
1006             }
1007             if (range == ZoomRange.RATIO_1_OR_LARGER) {
1008                 startRatio = 1.0f;
1009             } else if (range == ZoomRange.RATIO_1_OR_SMALLER) {
1010                 endRatio = 1.0f;
1011             }
1012             if (direction == ZoomDirection.ZOOM_OUT) {
1013                 float temp = startRatio;
1014                 startRatio = endRatio;
1015                 endRatio = temp;
1016             }
1017 
1018             int[] overrideImprovements = new int[NUM_ZOOM_STEPS];
1019             float[] zoomRatios = new float[NUM_ZOOM_STEPS];
1020 
1021             String streamName = "test_camera_zoom_override_latency";
1022             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
1023             mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
1024             mReportLog.addValue("zoom_test_case", testCase, ResultType.NEUTRAL, ResultUnit.NONE);
1025 
1026             try {
1027                 mTestRule.openDevice(id);
1028                 mPreviewSize = mTestRule.getOrderedPreviewSizes().get(0);
1029                 updatePreviewSurface(mPreviewSize);
1030 
1031                 // Start viewfinder with settings override set and the starting zoom ratio,
1032                 // and wait for some number of frames.
1033                 CaptureRequest.Builder previewBuilder = configurePreviewOutputs(id);
1034                 previewBuilder.set(CaptureRequest.CONTROL_SETTINGS_OVERRIDE,
1035                         CameraMetadata.CONTROL_SETTINGS_OVERRIDE_ZOOM);
1036                 previewBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, startRatio);
1037                 SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1038                 int sequenceId = mTestRule.getCameraSession().setRepeatingRequest(
1039                         previewBuilder.build(), resultListener, mTestRule.getHandler());
1040                 CaptureResult result = CameraTestUtils.waitForNumResults(
1041                         resultListener, NUM_RESULTS_WAIT, WAIT_FOR_RESULT_TIMEOUT_MS);
1042 
1043                 float previousRatio = startRatio;
1044                 for (int j = 0; j < NUM_ZOOM_STEPS; j++) {
1045                     float zoomFactor = startRatio + (endRatio - startRatio)
1046                              * (j + 1) / NUM_ZOOM_STEPS;
1047                     previewBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomFactor);
1048                     int newSequenceId = mTestRule.getCameraSession().setRepeatingRequest(
1049                             previewBuilder.build(), resultListener, mTestRule.getHandler());
1050                     long lastFrameNumberForRequest =
1051                             resultListener.getCaptureSequenceLastFrameNumber(sequenceId,
1052                                     WAIT_FOR_RESULT_TIMEOUT_MS);
1053 
1054                     int improvement = 0;
1055                     long frameNumber = -1;
1056                     Log.v(TAG, "LastFrameNumber for sequence " + sequenceId + ": "
1057                             + lastFrameNumberForRequest);
1058                     while (frameNumber < lastFrameNumberForRequest + 1) {
1059                         TotalCaptureResult zoomResult = resultListener.getTotalCaptureResult(
1060                                 WAIT_FOR_RESULT_TIMEOUT_MS);
1061                         frameNumber = zoomResult.getFrameNumber();
1062                         float resultZoomFactor = zoomResult.get(CaptureResult.CONTROL_ZOOM_RATIO);
1063 
1064                         Log.v(TAG, "frameNumber " + frameNumber + " zoom: " + resultZoomFactor);
1065                         assertTrue(String.format("Zoom ratio should monotonically increase/decrease"
1066                                 + " or stay the same (previous = %f, current = %f", previousRatio,
1067                                 resultZoomFactor),
1068                                 Math.abs(previousRatio - resultZoomFactor) < ZOOM_ERROR_MARGIN
1069                                 || (direction == ZoomDirection.ZOOM_IN
1070                                         && previousRatio < resultZoomFactor)
1071                                 || (direction == ZoomDirection.ZOOM_OUT
1072                                         && previousRatio > resultZoomFactor));
1073 
1074                         if (Math.abs(resultZoomFactor - zoomFactor) < ZOOM_ERROR_MARGIN
1075                                 && improvement == 0) {
1076                             improvement = (int) (lastFrameNumberForRequest + 1 - frameNumber);
1077                         }
1078                         previousRatio = resultZoomFactor;
1079                     }
1080 
1081                     // Zoom in from 1.0x must have at least 1 frame latency improvement.
1082                     if (direction == ZoomDirection.ZOOM_IN
1083                             && range == ZoomRange.RATIO_1_OR_LARGER
1084                             && staticMetadata.isPerFrameControlSupported()) {
1085                         mTestRule.getCollector().expectTrue(
1086                                 "Zoom-in latency improvement (" + improvement
1087                                 + ") must be at least " + ZOOM_IN_MIN_IMPROVEMENT_IN_FRAMES,
1088                                 improvement >= ZOOM_IN_MIN_IMPROVEMENT_IN_FRAMES);
1089                     }
1090                     zoomRatios[j] = zoomFactor;
1091                     overrideImprovements[j] = improvement;
1092 
1093                     sequenceId = newSequenceId;
1094                 }
1095 
1096                 mReportLog.addValues("Camera zoom ratios", zoomRatios, ResultType.NEUTRAL,
1097                         ResultUnit.NONE);
1098                 mReportLog.addValues("Latency improvements", overrideImprovements,
1099                         ResultType.HIGHER_BETTER, ResultUnit.FRAMES);
1100             } finally {
1101                 mTestRule.closeDefaultImageReader();
1102                 mTestRule.closeDevice(id);
1103                 closePreviewSurface();
1104             }
1105             mReportLog.submit(mInstrumentation);
1106 
1107             if (VERBOSE) {
1108                 Log.v(TAG, "Camera " + id + " zoom settings: " + Arrays.toString(zoomRatios));
1109                 Log.v(TAG, "Camera " + id + " zoom settings override latency improvements "
1110                         + "(in frames): " + Arrays.toString(overrideImprovements));
1111             }
1112         }
1113     }
1114 
1115     /**
1116      * Testing SurfaceView jitter reduction performance
1117      *
1118      * Because the application doesn't have access to SurfaceView frames,
1119      * we use an ImageReader with COMPOSER_OVERLAY usage.
1120      */
1121     @Test
1122     public void testSurfaceViewJitterReduction() throws Exception {
1123         String cameraId = null;
1124         Range<Integer>[] aeFpsRanges = null;
1125         for (String id : mTestRule.getCameraIdsUnderTest()) {
1126             StaticMetadata staticMetadata = mTestRule.getAllStaticInfo().get(id);
1127             if (staticMetadata.isColorOutputSupported()) {
1128                 cameraId = id;
1129                 aeFpsRanges = staticMetadata.getAeAvailableTargetFpsRangesChecked();
1130                 // Because jitter reduction is a framework feature and not camera specific,
1131                 // we only test for 1 camera Id.
1132                 break;
1133             }
1134         }
1135         if (cameraId == null) {
1136             Log.i(TAG, "No camera supports color outputs, skipping");
1137             return;
1138         }
1139 
1140         try {
1141             mTestRule.openDevice(cameraId);
1142 
1143             for (Range<Integer> fpsRange : aeFpsRanges) {
1144                 if (fpsRange.getLower() == fpsRange.getUpper()) {
1145                     testPreviewJitterForFpsRange(cameraId,
1146                             HardwareBuffer.USAGE_COMPOSER_OVERLAY,
1147                             /*reduceJitter*/false, fpsRange);
1148 
1149                     testPreviewJitterForFpsRange(cameraId,
1150                             HardwareBuffer.USAGE_COMPOSER_OVERLAY,
1151                             /*reduceJitter*/true, fpsRange);
1152                 }
1153             }
1154         } finally {
1155             mTestRule.closeDevice(cameraId);
1156         }
1157     }
1158 
1159     /**
1160      * Testing SurfaceTexture jitter reduction performance
1161      */
1162     @Test
1163     public void testSurfaceTextureJitterReduction() throws Exception {
1164         String cameraId = null;
1165         Range<Integer>[] aeFpsRanges = null;
1166         for (String id : mTestRule.getCameraIdsUnderTest()) {
1167             StaticMetadata staticMetadata = mTestRule.getAllStaticInfo().get(id);
1168             if (staticMetadata.isColorOutputSupported()) {
1169                 cameraId = id;
1170                 aeFpsRanges = staticMetadata.getAeAvailableTargetFpsRangesChecked();
1171                 // Because jitter reduction is a framework feature and not camera specific,
1172                 // we only test for 1 camera Id.
1173                 break;
1174             }
1175         }
1176         if (cameraId == null) {
1177             Log.i(TAG, "No camera supports color outputs, skipping");
1178             return;
1179         }
1180 
1181         try {
1182             mTestRule.openDevice(cameraId);
1183 
1184             for (Range<Integer> fpsRange : aeFpsRanges) {
1185                 if (fpsRange.getLower() == fpsRange.getUpper()) {
1186                     testPreviewJitterForFpsRange(cameraId,
1187                             HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE,
1188                             /*reduceJitter*/false, fpsRange);
1189                     testPreviewJitterForFpsRange(cameraId,
1190                             HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE,
1191                             /*reduceJitter*/true, fpsRange);
1192                 }
1193             }
1194         } finally {
1195             mTestRule.closeDevice(cameraId);
1196         }
1197     }
1198 
1199     private void testPreviewJitterForFpsRange(String cameraId, long usage,
1200             boolean reduceJitter, Range<Integer> fpsRange) throws Exception {
1201         try {
1202             assertTrue("usage must be COMPOSER_OVERLAY/GPU_SAMPLED_IMAGE, but is " + usage,
1203                     usage == HardwareBuffer.USAGE_COMPOSER_OVERLAY
1204                     || usage == HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE);
1205             String streamName = "test_camera_preview_jitter_";
1206             if (usage == HardwareBuffer.USAGE_COMPOSER_OVERLAY) {
1207                 streamName += "surface_view";
1208             } else {
1209                 streamName += "surface_texture";
1210             }
1211             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
1212             mReportLog.addValue("camera_id", cameraId, ResultType.NEUTRAL, ResultUnit.NONE);
1213 
1214             // Display refresh rate while camera is active. Note that the default display's
1215             // getRefreshRate() isn't reflecting the real refresh rate. Hardcode it for now.
1216             float refreshRate = 60.0f;
1217             float numRefreshesPerDuration = refreshRate / fpsRange.getLower();
1218             long refreshInterval = (long) (1000000000L / refreshRate);
1219 
1220             Long frameDuration = (long) (1e9 / fpsRange.getLower());
1221             initializeImageReader(cameraId, ImageFormat.PRIVATE,
1222                     frameDuration, usage);
1223 
1224             CameraCharacteristics ch =
1225                     mTestRule.getCameraManager().getCameraCharacteristics(cameraId);
1226             Integer timestampSource = ch.get(CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
1227             assertNotNull("Timestamp source must not be null", timestampSource);
1228 
1229             boolean timestampIsRealtime = false;
1230             if (timestampSource == CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME
1231                     && (!reduceJitter || usage == HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE)) {
1232                 timestampIsRealtime = true;
1233             }
1234             SimpleTimestampListener imageListener =
1235                     new SimpleTimestampListener(timestampIsRealtime);
1236             mTestRule.getReader().setOnImageAvailableListener(
1237                     imageListener, mTestRule.getHandler());
1238 
1239             CaptureRequest.Builder previewBuilder = mTestRule.getCamera().createCaptureRequest(
1240                     CameraDevice.TEMPLATE_PREVIEW);
1241             previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
1242             previewBuilder.addTarget(mTestRule.getReaderSurface());
1243             CaptureRequest previewRequest = previewBuilder.build();
1244 
1245             List<OutputConfiguration> outputConfigs = new ArrayList<>();
1246             OutputConfiguration config = new OutputConfiguration(mTestRule.getReaderSurface());
1247             if (!reduceJitter) {
1248                 config.setTimestampBase(OutputConfiguration.TIMESTAMP_BASE_SENSOR);
1249             }
1250             outputConfigs.add(config);
1251 
1252             boolean useSessionKeys = isFpsRangeASessionKey(ch);
1253             mTestRule.setCameraSessionListener(new BlockingSessionCallback());
1254             configureAndSetCameraSessionWithConfigs(outputConfigs, useSessionKeys, previewRequest);
1255 
1256             // Start preview and run for 6 seconds
1257             SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1258             mTestRule.getCameraSession().setRepeatingRequest(
1259                     previewRequest, resultListener, mTestRule.getHandler());
1260 
1261             Thread.sleep(6000);
1262 
1263             blockingStopRepeating();
1264 
1265             // Let N be expected number of VSYNCs between frames
1266             //
1267             // Number of frames ahead of expected VSYNC: 0.5 * VSYNC < frame duration <=
1268             // (N - 0.5) * VSYNC
1269             long framesAheadCount = 0;
1270             // Number of frames delayed past the expected VSYNC: frame duration >= (N + 0.5) * VSYNC
1271             long framesDelayedCount = 0;
1272             // Number of frames dropped: Fell into one single VSYNC
1273             long framesDroppedCount = 0;
1274             // The number of frame intervals in total
1275             long intervalCount = imageListener.getTimestampCount() - 1;
1276             assertTrue("Number of timestamp intervals must be at least 1, but is " + intervalCount,
1277                     intervalCount >= 1);
1278             // The sum of delays in ms for all frames captured
1279             double framesDelayInMs = 0;
1280 
1281             SimpleTimestampListener.TimestampHolder timestamp1 =
1282                     imageListener.getNextTimestampHolder();
1283             if (usage == HardwareBuffer.USAGE_COMPOSER_OVERLAY) {
1284                 framesDelayInMs =
1285                         Math.max(0, timestamp1.mTimestamp - timestamp1.mDeliveryTime) / 1000000;
1286             } else {
1287                 framesDelayInMs =
1288                         (timestamp1.mDeliveryTime - timestamp1.mTimestamp) / 1000000;
1289             }
1290             for (long i = 0; i < intervalCount; i++) {
1291                 SimpleTimestampListener.TimestampHolder timestamp2 =
1292                         imageListener.getNextTimestampHolder();
1293                 // The listener uses the image timestamp if it's in the future. Otherwise, use
1294                 // the current system time (image delivery time).
1295                 long presentTime2 = Math.max(timestamp2.mDeliveryTime, timestamp2.mTimestamp);
1296                 long presentTime1 = Math.max(timestamp1.mDeliveryTime, timestamp1.mTimestamp);
1297                 long frameInterval = presentTime2 - presentTime1;
1298                 if (frameInterval <= refreshInterval / 2) {
1299                     framesDroppedCount++;
1300                 } else if (frameInterval <= refreshInterval * (numRefreshesPerDuration - 0.5f)) {
1301                     framesAheadCount++;
1302                 } else if (frameInterval >=  refreshInterval * (numRefreshesPerDuration + 0.5f)) {
1303                     framesDelayedCount++;
1304                 }
1305 
1306                 if (usage == HardwareBuffer.USAGE_COMPOSER_OVERLAY) {
1307                     framesDelayInMs +=
1308                             Math.max(0, timestamp2.mTimestamp - timestamp2.mDeliveryTime) / 1000000;
1309                 } else {
1310                     framesDelayInMs +=
1311                             (timestamp1.mDeliveryTime - timestamp1.mTimestamp) / 1000000;
1312                 }
1313                 timestamp1 = timestamp2;
1314             }
1315 
1316             mReportLog.addValue("reduce_jitter", reduceJitter, ResultType.NEUTRAL,
1317                     ResultUnit.NONE);
1318             mReportLog.addValue("camera_configured_frame_rate", fpsRange.getLower(),
1319                     ResultType.NEUTRAL, ResultUnit.NONE);
1320             mReportLog.addValue("camera_preview_frame_dropped_rate",
1321                     1.0f * framesDroppedCount / intervalCount, ResultType.LOWER_BETTER,
1322                     ResultUnit.NONE);
1323             mReportLog.addValue("camera_preview_frame_ahead_rate",
1324                     1.0f * framesAheadCount / intervalCount, ResultType.LOWER_BETTER,
1325                     ResultUnit.NONE);
1326             mReportLog.addValue("camera_preview_frame_delayed_rate",
1327                     1.0f * framesDelayedCount / intervalCount,
1328                     ResultType.LOWER_BETTER, ResultUnit.NONE);
1329             mReportLog.addValue("camera_preview_frame_latency_ms",
1330                     framesDelayInMs / (intervalCount + 1), ResultType.LOWER_BETTER,
1331                     ResultUnit.MS);
1332 
1333             if (VERBOSE) {
1334                 Log.v(TAG, "Camera " + cameraId + " frame rate: " + fpsRange.getLower()
1335                         + ", dropped rate: " + (1.0f * framesDroppedCount / intervalCount)
1336                         + ", ahead rate: " + (1.0f * framesAheadCount / intervalCount)
1337                         + ", delayed rate: " + (1.0f * framesDelayedCount / intervalCount)
1338                         + ", latency in ms: " + (framesDelayInMs / (intervalCount + 1)));
1339             }
1340         } finally {
1341             mTestRule.closeDefaultImageReader();
1342             mReportLog.submit(mInstrumentation);
1343         }
1344     }
1345 
    /**
     * Measures the capture stall introduced by reprocess requests issued while the ZSL
     * stream is running.
     *
     * <p>Half of {@code MAX_REPROCESS_IMAGES} ZSL images are queued for reprocessing; for
     * each one, the sensor-timestamp gaps of the following frames are collected and the
     * maximum gap must stay within {@code (maxCaptureStall + 1)} average frame durations
     * plus {@code REPROCESS_STALL_MARGIN}.</p>
     *
     * @param reprocessInputFormat the reprocess input image format (e.g. PRIVATE or YUV)
     */
    private void reprocessingCaptureStallTestByCamera(int reprocessInputFormat) throws Exception {
        prepareReprocessCapture(reprocessInputFormat);

        // Let it stream for a while before reprocessing
        startZslStreaming();
        waitForFrames(NUM_RESULTS_WAIT);

        final int NUM_REPROCESS_TESTED = MAX_REPROCESS_IMAGES / 2;
        // Prepare several reprocessing request
        Image[] inputImages = new Image[NUM_REPROCESS_TESTED];
        CaptureRequest.Builder[] reprocessReqs = new CaptureRequest.Builder[MAX_REPROCESS_IMAGES];
        for (int i = 0; i < NUM_REPROCESS_TESTED; i++) {
            // Match each ZSL image to the capture result with the same timestamp, then
            // build a reprocess request from that result targeting the JPEG reader.
            inputImages[i] =
                    mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
            TotalCaptureResult zslResult =
                    mZslResultListener.getCaptureResult(
                            WAIT_FOR_RESULT_TIMEOUT_MS, inputImages[i].getTimestamp());
            reprocessReqs[i] = mTestRule.getCamera().createReprocessCaptureRequest(zslResult);
            reprocessReqs[i].addTarget(mJpegReader.getSurface());
            // Request high-quality NR and edge processing for the reprocess captures.
            reprocessReqs[i].set(CaptureRequest.NOISE_REDUCTION_MODE,
                    CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
            reprocessReqs[i].set(CaptureRequest.EDGE_MODE,
                    CaptureRequest.EDGE_MODE_HIGH_QUALITY);
            mWriter.queueInputImage(inputImages[i]);
        }

        double[] maxCaptureGapsMs = new double[NUM_REPROCESS_TESTED];
        double[] averageFrameDurationMs = new double[NUM_REPROCESS_TESTED];
        Arrays.fill(averageFrameDurationMs, 0.0);
        final int MAX_REPROCESS_RETURN_FRAME_COUNT = 20;
        SimpleCaptureCallback reprocessResultListener = new SimpleCaptureCallback();
        for (int i = 0; i < NUM_REPROCESS_TESTED; i++) {
            mZslResultListener.drain();
            CaptureRequest reprocessRequest = reprocessReqs[i].build();
            mTestRule.getCameraSession().capture(
                    reprocessRequest, reprocessResultListener, mTestRule.getHandler());
            // Wait for reprocess output jpeg and result come back.
            reprocessResultListener.getCaptureResultForRequest(reprocessRequest,
                    CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
            mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS).close();
            long numFramesMaybeStalled = mZslResultListener.getTotalNumFrames();
            assertTrue("Reprocess capture result should be returned in "
                            + MAX_REPROCESS_RETURN_FRAME_COUNT + " frames",
                    numFramesMaybeStalled <= MAX_REPROCESS_RETURN_FRAME_COUNT);

            // Need look longer time, as the stutter could happen after the reprocessing
            // output frame is received.
            long[] timestampGap = new long[MAX_REPROCESS_RETURN_FRAME_COUNT + 1];
            Arrays.fill(timestampGap, 0);
            CaptureResult[] results = new CaptureResult[timestampGap.length];
            long[] frameDurationsNs = new long[timestampGap.length];
            for (int j = 0; j < results.length; j++) {
                results[j] = mZslResultListener.getCaptureResult(
                        CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                if (j > 0) {
                    // Gap between consecutive sensor timestamps; must be positive.
                    timestampGap[j] = results[j].get(CaptureResult.SENSOR_TIMESTAMP) -
                            results[j - 1].get(CaptureResult.SENSOR_TIMESTAMP);
                    assertTrue("Time stamp should be monotonically increasing",
                            timestampGap[j] > 0);
                }
                frameDurationsNs[j] = results[j].get(CaptureResult.SENSOR_FRAME_DURATION);
            }

            if (VERBOSE) {
                Log.i(TAG, "timestampGap: " + Arrays.toString(timestampGap));
                Log.i(TAG, "frameDurationsNs: " + Arrays.toString(frameDurationsNs));
            }

            // Get the number of candidate results, calculate the average frame duration
            // and max timestamp gap.
            Arrays.sort(timestampGap);
            double maxTimestampGapMs = timestampGap[timestampGap.length - 1] / 1000000.0;
            for (int m = 0; m < frameDurationsNs.length; m++) {
                averageFrameDurationMs[i] += (frameDurationsNs[m] / 1000000.0);
            }
            averageFrameDurationMs[i] /= frameDurationsNs.length;

            maxCaptureGapsMs[i] = maxTimestampGapMs;
        }

        blockingStopRepeating();

        String reprocessType = "YUV reprocessing";
        if (reprocessInputFormat == ImageFormat.PRIVATE) {
            reprocessType = "opaque reprocessing";
        }
        mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL, ResultUnit.NONE);
        mReportLog.addValues("max_capture_timestamp_gaps", maxCaptureGapsMs,
                ResultType.LOWER_BETTER, ResultUnit.MS);
        mReportLog.addValues("capture_average_frame_duration", averageFrameDurationMs,
                ResultType.LOWER_BETTER, ResultUnit.MS);
        mReportLog.setSummary("camera_reprocessing_average_max_capture_timestamp_gaps",
                Stat.getAverage(maxCaptureGapsMs), ResultType.LOWER_BETTER, ResultUnit.MS);

        // The max timestamp gap should be less than (captureStall + 1) x average frame
        // duration * (1 + error margin).
        int maxCaptureStallFrames = mTestRule.getStaticInfo().getMaxCaptureStallOrDefault();
        for (int i = 0; i < maxCaptureGapsMs.length; i++) {
            double stallDurationBound = averageFrameDurationMs[i] *
                    (maxCaptureStallFrames + 1) * (1 + REPROCESS_STALL_MARGIN);
            assertTrue("max capture stall duration should be no larger than " + stallDurationBound,
                    maxCaptureGapsMs[i] <= stallDurationBound);
        }
    }
1450 
    /**
     * Measures reprocess capture latency over {@code MAX_REPROCESS_IMAGES} captures.
     *
     * <p>In async mode all requests are submitted up front and the per-image arrival
     * latency of the output JPEGs is measured (throughput). In sync mode each request is
     * submitted only after the previous JPEG returns (shot-to-shot latency). Results are
     * written to {@code mReportLog}.</p>
     *
     * @param reprocessInputFormat the reprocess input image format (e.g. PRIVATE or YUV)
     * @param asyncMode true to measure throughput, false to measure shot-to-shot latency
     * @param requireHighQuality true to request high-quality noise reduction and edge modes
     */
    private void reprocessingPerformanceTestByCamera(int reprocessInputFormat, boolean asyncMode,
            boolean requireHighQuality)
            throws Exception {
        // Prepare the reprocessing capture
        prepareReprocessCapture(reprocessInputFormat);

        // Start ZSL streaming
        startZslStreaming();
        waitForFrames(NUM_RESULTS_WAIT);

        CaptureRequest.Builder[] reprocessReqs = new CaptureRequest.Builder[MAX_REPROCESS_IMAGES];
        Image[] inputImages = new Image[MAX_REPROCESS_IMAGES];
        double[] getImageLatenciesMs = new double[MAX_REPROCESS_IMAGES];
        long startTimeMs;
        for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
            // Match each ZSL image to the capture result sharing its timestamp and build
            // a reprocess request from it.
            inputImages[i] =
                    mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
            TotalCaptureResult zslResult =
                    mZslResultListener.getCaptureResult(
                            WAIT_FOR_RESULT_TIMEOUT_MS, inputImages[i].getTimestamp());
            reprocessReqs[i] = mTestRule.getCamera().createReprocessCaptureRequest(zslResult);
            if (requireHighQuality) {
                // Reprocessing should support high quality for NR and edge modes.
                reprocessReqs[i].set(CaptureRequest.NOISE_REDUCTION_MODE,
                        CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
                reprocessReqs[i].set(CaptureRequest.EDGE_MODE,
                        CaptureRequest.EDGE_MODE_HIGH_QUALITY);
            }
            reprocessReqs[i].addTarget(mJpegReader.getSurface());
        }

        if (asyncMode) {
            // async capture: issue all the reprocess requests as quick as possible, then
            // check the throughput of the output jpegs.
            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
                // Could be slow for YUV reprocessing, do it in advance.
                mWriter.queueInputImage(inputImages[i]);
            }

            // Submit the requests
            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
                mTestRule.getCameraSession().capture(reprocessReqs[i].build(), null, null);
            }

            // Get images
            startTimeMs = SystemClock.elapsedRealtime();
            Image jpegImages[] = new Image[MAX_REPROCESS_IMAGES];
            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
                jpegImages[i] = mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                // Latency here is the inter-arrival time between consecutive JPEGs.
                getImageLatenciesMs[i] = SystemClock.elapsedRealtime() - startTimeMs;
                startTimeMs = SystemClock.elapsedRealtime();
            }
            for (Image i : jpegImages) {
                i.close();
            }
        } else {
            // sync capture: issue reprocess request one by one, only submit next one when
            // the previous capture image is returned. This is to test the back to back capture
            // performance.
            Image jpegImages[] = new Image[MAX_REPROCESS_IMAGES];
            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
                startTimeMs = SystemClock.elapsedRealtime();
                mWriter.queueInputImage(inputImages[i]);
                mTestRule.getCameraSession().capture(reprocessReqs[i].build(), null, null);
                jpegImages[i] = mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                getImageLatenciesMs[i] = SystemClock.elapsedRealtime() - startTimeMs;
            }
            for (Image i : jpegImages) {
                i.close();
            }
        }

        blockingStopRepeating();

        String reprocessType = "YUV reprocessing";
        if (reprocessInputFormat == ImageFormat.PRIVATE) {
            reprocessType = "opaque reprocessing";
        }

        // Report the performance data
        String captureMsg;
        if (asyncMode) {
            captureMsg = "capture latency";
            if (requireHighQuality) {
                captureMsg += " for High Quality noise reduction and edge modes";
            }
            mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL,
                    ResultUnit.NONE);
            mReportLog.addValue("capture_message", captureMsg, ResultType.NEUTRAL,
                    ResultUnit.NONE);
            mReportLog.addValues("latency", getImageLatenciesMs, ResultType.LOWER_BETTER,
                    ResultUnit.MS);
            mReportLog.setSummary("camera_reprocessing_average_latency",
                    Stat.getAverage(getImageLatenciesMs), ResultType.LOWER_BETTER, ResultUnit.MS);
        } else {
            captureMsg = "shot to shot latency";
            if (requireHighQuality) {
                captureMsg += " for High Quality noise reduction and edge modes";
            }
            mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL,
                    ResultUnit.NONE);
            mReportLog.addValue("capture_message", captureMsg, ResultType.NEUTRAL,
                    ResultUnit.NONE);
            mReportLog.addValues("latency", getImageLatenciesMs, ResultType.LOWER_BETTER,
                    ResultUnit.MS);
            mReportLog.setSummary("camera_reprocessing_shot_to_shot_average_latency",
                    Stat.getAverage(getImageLatenciesMs), ResultType.LOWER_BETTER, ResultUnit.MS);
        }
    }
1560 
1561     /**
1562      * Start preview and ZSL streaming
1563      */
startZslStreaming()1564     private void startZslStreaming() throws Exception {
1565         CaptureRequest.Builder zslBuilder =
1566                 mTestRule.getCamera().createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
1567         zslBuilder.addTarget(mPreviewSurface);
1568         zslBuilder.addTarget(mCameraZslReader.getSurface());
1569         mTestRule.getCameraSession().setRepeatingRequest(
1570                 zslBuilder.build(), mZslResultListener, mTestRule.getHandler());
1571     }
1572 
1573     /**
1574      * Wait for a certain number of frames, the images and results will be drained from the
1575      * listeners to make sure that next reprocessing can get matched results and images.
1576      *
1577      * @param numFrameWait The number of frames to wait before return, 0 means that
1578      *      this call returns immediately after streaming on.
1579      */
waitForFrames(int numFrameWait)1580     private void waitForFrames(int numFrameWait) throws Exception {
1581         if (numFrameWait < 0) {
1582             throw new IllegalArgumentException("numFrameWait " + numFrameWait +
1583                     " should be non-negative");
1584         }
1585 
1586         for (int i = 0; i < numFrameWait; i++) {
1587             mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS).close();
1588         }
1589     }
1590 
    /**
     * Drain and close the ZSL reader, JPEG reader, and input ImageWriter.
     * Each listener is drained before its reader is closed; fields are nulled
     * after closing.
     */
    private void closeReaderWriters() {
        mCameraZslImageListener.drain();
        CameraTestUtils.closeImageReader(mCameraZslReader);
        mCameraZslReader = null;
        mJpegListener.drain();
        CameraTestUtils.closeImageReader(mJpegReader);
        mJpegReader = null;
        CameraTestUtils.closeImageWriter(mWriter);
        mWriter = null;
    }
1601 
    /**
     * Set up the full reprocess pipeline for the given input format: preview surface,
     * ZSL input reader, JPEG output reader, a reprocessable capture session, and an
     * ImageWriter bound to the session's input surface.
     *
     * @param inputFormat the reprocess input image format (e.g. PRIVATE or YUV)
     */
    private void prepareReprocessCapture(int inputFormat)
            throws CameraAccessException {
        // 1. Find the right preview and capture sizes.
        Size maxPreviewSize = mTestRule.getOrderedPreviewSizes().get(0);
        Size[] supportedInputSizes =
                mTestRule.getStaticInfo().getAvailableSizesForFormatChecked(inputFormat,
                        StaticMetadata.StreamDirection.Input);
        Size maxInputSize = CameraTestUtils.getMaxSize(supportedInputSizes);
        Size maxJpegSize = mTestRule.getOrderedStillSizes().get(0);
        updatePreviewSurface(maxPreviewSize);
        mZslResultListener = new SimpleCaptureCallback();

        // 2. Create camera output ImageReaders.
        // YUV/Opaque output, camera should support output with input size/format
        mCameraZslImageListener = new SimpleImageReaderListener(
                /*asyncMode*/true, MAX_ZSL_IMAGES - MAX_REPROCESS_IMAGES);
        mCameraZslReader = CameraTestUtils.makeImageReader(
                maxInputSize, inputFormat, MAX_ZSL_IMAGES,
                mCameraZslImageListener, mTestRule.getHandler());
        // Jpeg reprocess output
        mJpegListener = new SimpleImageReaderListener();
        mJpegReader = CameraTestUtils.makeImageReader(
                maxJpegSize, ImageFormat.JPEG, MAX_JPEG_IMAGES,
                mJpegListener, mTestRule.getHandler());

        // create camera reprocess session
        List<Surface> outSurfaces = new ArrayList<Surface>();
        outSurfaces.add(mPreviewSurface);
        outSurfaces.add(mCameraZslReader.getSurface());
        outSurfaces.add(mJpegReader.getSurface());
        InputConfiguration inputConfig = new InputConfiguration(maxInputSize.getWidth(),
                maxInputSize.getHeight(), inputFormat);
        mTestRule.setCameraSessionListener(new BlockingSessionCallback());
        mTestRule.setCameraSession(CameraTestUtils.configureReprocessableCameraSession(
                mTestRule.getCamera(), inputConfig, outSurfaces,
                mTestRule.getCameraSessionListener(), mTestRule.getHandler()));

        // 3. Create ImageWriter for input
        mWriter = CameraTestUtils.makeImageWriter(
                mTestRule.getCameraSession().getInputSurface(), MAX_INPUT_IMAGES,
                /*listener*/null, /*handler*/null);
    }
1644 
    /**
     * Stop the repeating request on the current camera session and block until the
     * session reports SESSION_READY, i.e. the device has gone back to idle.
     */
    private void blockingStopRepeating() throws Exception {
        stopRepeating();
        mTestRule.getCameraSessionListener().getStateWaiter().waitForState(
                BlockingSessionCallback.SESSION_READY, CameraTestUtils.CAMERA_IDLE_TIMEOUT_MS);
    }
1654 
    /**
     * Start a repeating preview request and block until the first image arrives at the
     * given image listener.
     *
     * @param id camera id (not referenced by this helper's body)
     * @param listener capture callback for the repeating request
     * @param previewRequest the preview request to repeat
     * @param imageListener listener whose first available image unblocks the call
     */
    private void blockingStartPreview(String id, CaptureCallback listener,
            CaptureRequest previewRequest, SimpleImageListener imageListener)
            throws Exception {
        mTestRule.getCameraSession().setRepeatingRequest(
                previewRequest, listener, mTestRule.getHandler());
        imageListener.waitForImageAvailable(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
    }
1662 
1663     /**
1664      * Setup still capture configuration and start preview.
1665      *
1666      * @param id The camera id under test
1667      * @param previewBuilder The capture request builder to be used for preview
1668      * @param stillBuilder The capture request builder to be used for still capture
1669      * @param previewSz Preview size
1670      * @param captureSizes Still capture sizes
1671      * @param formats The single capture image formats
1672      * @param resultListener Capture result listener
1673      * @param maxNumImages The max number of images set to the image reader
1674      * @param imageListeners The single capture capture image listeners
1675      * @param enablePostView Enable post view as part of the still capture request
1676      */
prepareStillCaptureAndStartPreview(String id, CaptureRequest.Builder previewBuilder, CaptureRequest.Builder stillBuilder, Size previewSz, Size[] captureSizes, int[] formats, CaptureCallback resultListener, int maxNumImages, ImageReader.OnImageAvailableListener[] imageListeners, boolean enablePostView)1677     private ImageReader[] prepareStillCaptureAndStartPreview(String id,
1678             CaptureRequest.Builder previewBuilder, CaptureRequest.Builder stillBuilder,
1679             Size previewSz, Size[] captureSizes, int[] formats, CaptureCallback resultListener,
1680             int maxNumImages, ImageReader.OnImageAvailableListener[] imageListeners,
1681             boolean enablePostView)
1682             throws Exception {
1683 
1684         if ((captureSizes == null) || (formats == null) || (imageListeners == null) &&
1685                 (captureSizes.length != formats.length) ||
1686                 (formats.length != imageListeners.length)) {
1687             throw new IllegalArgumentException("Invalid capture sizes/formats or image listeners!");
1688         }
1689 
1690         if (VERBOSE) {
1691             Log.v(TAG, String.format("Prepare still capture and preview (%s)",
1692                     previewSz.toString()));
1693         }
1694 
1695         // Update preview size.
1696         updatePreviewSurface(previewSz);
1697 
1698         ImageReader[] readers = new ImageReader[captureSizes.length];
1699         List<Surface> outputSurfaces = new ArrayList<Surface>();
1700         outputSurfaces.add(mPreviewSurface);
1701         for (int i = 0; i < captureSizes.length; i++) {
1702             readers[i] = CameraTestUtils.makeImageReader(captureSizes[i], formats[i], maxNumImages,
1703                     imageListeners[i], mTestRule.getHandler());
1704             outputSurfaces.add(readers[i].getSurface());
1705         }
1706 
1707         // Configure the requests.
1708         previewBuilder.addTarget(mPreviewSurface);
1709         if (enablePostView)
1710             stillBuilder.addTarget(mPreviewSurface);
1711         for (int i = 0; i < readers.length; i++) {
1712             stillBuilder.addTarget(readers[i].getSurface());
1713         }
1714 
1715         // Update target fps based on the min frame duration of preview.
1716         CameraCharacteristics ch = mTestRule.getStaticInfo().getCharacteristics();
1717         StreamConfigurationMap config = ch.get(
1718                 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
1719         long minFrameDuration = Math.max(FRAME_DURATION_NS_30FPS, config.getOutputMinFrameDuration(
1720                 SurfaceTexture.class, previewSz));
1721         Range<Integer> targetRange =
1722                 CameraTestUtils.getSuitableFpsRangeForDuration(id,
1723                 minFrameDuration, mTestRule.getStaticInfo());
1724         previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
1725         stillBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
1726 
1727         CaptureRequest previewRequest = previewBuilder.build();
1728         mTestRule.setCameraSessionListener(new BlockingSessionCallback());
1729         boolean useSessionKeys = isFpsRangeASessionKey(ch);
1730         configureAndSetCameraSession(outputSurfaces, useSessionKeys, previewRequest);
1731 
1732         // Start preview.
1733         mTestRule.getCameraSession().setRepeatingRequest(
1734                 previewRequest, resultListener, mTestRule.getHandler());
1735 
1736         return readers;
1737     }
1738 
1739     /**
1740      * Helper function to check if TARGET_FPS_RANGE is a session parameter
1741      */
isFpsRangeASessionKey(CameraCharacteristics ch)1742     private boolean isFpsRangeASessionKey(CameraCharacteristics ch) {
1743         List<CaptureRequest.Key<?>> sessionKeys = ch.getAvailableSessionKeys();
1744         return sessionKeys != null &&
1745                 sessionKeys.contains(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE);
1746     }
1747 
1748     /**
1749      * Helper function to configure camera session using parameters provided.
1750      */
configureAndSetCameraSession(List<Surface> surfaces, boolean useInitialRequest, CaptureRequest initialRequest)1751     private void configureAndSetCameraSession(List<Surface> surfaces,
1752             boolean useInitialRequest, CaptureRequest initialRequest)
1753             throws CameraAccessException {
1754         CameraCaptureSession cameraSession;
1755         if (useInitialRequest) {
1756             cameraSession = CameraTestUtils.configureCameraSessionWithParameters(
1757                 mTestRule.getCamera(), surfaces,
1758                 mTestRule.getCameraSessionListener(), mTestRule.getHandler(),
1759                 initialRequest);
1760         } else {
1761             cameraSession = CameraTestUtils.configureCameraSession(
1762                 mTestRule.getCamera(), surfaces,
1763                 mTestRule.getCameraSessionListener(), mTestRule.getHandler());
1764         }
1765         mTestRule.setCameraSession(cameraSession);
1766     }
1767 
1768     /*
1769      * Helper function to configure camera session using parameters provided.
1770      */
configureAndSetCameraSessionWithConfigs(List<OutputConfiguration> configs, boolean useInitialRequest, CaptureRequest initialRequest)1771     private void configureAndSetCameraSessionWithConfigs(List<OutputConfiguration> configs,
1772             boolean useInitialRequest, CaptureRequest initialRequest)
1773             throws CameraAccessException {
1774         CameraCaptureSession cameraSession;
1775         if (useInitialRequest) {
1776             cameraSession = CameraTestUtils.tryConfigureCameraSessionWithConfig(
1777                 mTestRule.getCamera(), configs, initialRequest,
1778                 mTestRule.getCameraSessionListener(), mTestRule.getHandler());
1779         } else {
1780             cameraSession = CameraTestUtils.configureCameraSessionWithConfig(
1781                 mTestRule.getCamera(), configs,
1782                 mTestRule.getCameraSessionListener(), mTestRule.getHandler());
1783         }
1784         mTestRule.setCameraSession(cameraSession);
1785     }
1786 
1787     /**
1788      * Setup single capture configuration and start preview.
1789      *
1790      * @param previewBuilder The capture request builder to be used for preview
1791      * @param stillBuilder The capture request builder to be used for still capture
1792      * @param previewSz Preview size
1793      * @param captureSz Still capture size
1794      * @param format The single capture image format
1795      * @param resultListener Capture result listener
1796      * @param sessionListener Session listener
1797      * @param maxNumImages The max number of images set to the image reader
     * @param imageListener The single capture image listener
1799      * @param useSessionKeys Create capture session using session keys from previewRequest
1800      */
prepareCaptureAndStartPreview(CaptureRequest.Builder previewBuilder, CaptureRequest.Builder stillBuilder, Size previewSz, Size captureSz, int format, CaptureCallback resultListener, CameraCaptureSession.StateCallback sessionListener, int maxNumImages, ImageReader.OnImageAvailableListener imageListener, boolean useSessionKeys)1801     private void prepareCaptureAndStartPreview(CaptureRequest.Builder previewBuilder,
1802             CaptureRequest.Builder stillBuilder, Size previewSz, Size captureSz, int format,
1803             CaptureCallback resultListener, CameraCaptureSession.StateCallback sessionListener,
1804             int maxNumImages, ImageReader.OnImageAvailableListener imageListener,
1805             boolean  useSessionKeys) throws Exception {
1806         if ((captureSz == null) || (imageListener == null)) {
1807             throw new IllegalArgumentException("Invalid capture size or image listener!");
1808         }
1809 
1810         if (VERBOSE) {
1811             Log.v(TAG, String.format("Prepare single capture (%s) and preview (%s)",
1812                     captureSz.toString(), previewSz.toString()));
1813         }
1814 
1815         // Update preview size.
1816         updatePreviewSurface(previewSz);
1817 
1818         // Create ImageReader.
1819         mTestRule.createDefaultImageReader(captureSz, format, maxNumImages, imageListener);
1820 
1821         // Configure output streams with preview and jpeg streams.
1822         List<Surface> outputSurfaces = new ArrayList<Surface>();
1823         outputSurfaces.add(mPreviewSurface);
1824         outputSurfaces.add(mTestRule.getReaderSurface());
1825         if (sessionListener == null) {
1826             mTestRule.setCameraSessionListener(new BlockingSessionCallback());
1827         } else {
1828             mTestRule.setCameraSessionListener(new BlockingSessionCallback(sessionListener));
1829         }
1830 
1831         // Configure the requests.
1832         previewBuilder.addTarget(mPreviewSurface);
1833         stillBuilder.addTarget(mPreviewSurface);
1834         stillBuilder.addTarget(mTestRule.getReaderSurface());
1835         CaptureRequest previewRequest = previewBuilder.build();
1836 
1837         configureAndSetCameraSession(outputSurfaces, useSessionKeys, previewRequest);
1838 
1839         // Start preview.
1840         mTestRule.getCameraSession().setRepeatingRequest(
1841                 previewRequest, resultListener, mTestRule.getHandler());
1842     }
1843 
1844     /**
1845      * Update the preview surface size.
1846      *
1847      * @param size The preview size to be updated.
1848      */
updatePreviewSurface(Size size)1849     private void updatePreviewSurface(Size size) {
1850         if ((mPreviewSurfaceTexture != null ) || (mPreviewSurface != null)) {
1851             closePreviewSurface();
1852         }
1853 
1854         mPreviewSurfaceTexture = new SurfaceTexture(/*random int*/ 1);
1855         mPreviewSurfaceTexture.setDefaultBufferSize(size.getWidth(), size.getHeight());
1856         mPreviewSurface = new Surface(mPreviewSurfaceTexture);
1857     }
1858 
1859     /**
1860      * Release preview surface and corresponding surface texture.
1861      */
closePreviewSurface()1862     private void closePreviewSurface() {
1863         if (mPreviewSurface != null) {
1864             mPreviewSurface.release();
1865             mPreviewSurface = null;
1866         }
1867 
1868         if (mPreviewSurfaceTexture != null) {
1869             mPreviewSurfaceTexture.release();
1870             mPreviewSurfaceTexture = null;
1871         }
1872     }
1873 
isReprocessSupported(String cameraId, int format)1874     private boolean isReprocessSupported(String cameraId, int format)
1875             throws CameraAccessException {
1876         if (format != ImageFormat.YUV_420_888 && format != ImageFormat.PRIVATE) {
1877             throw new IllegalArgumentException(
1878                     "format " + format + " is not supported for reprocessing");
1879         }
1880 
1881         StaticMetadata info = new StaticMetadata(
1882                 mTestRule.getCameraManager().getCameraCharacteristics(cameraId), CheckLevel.ASSERT,
1883                 /*collector*/ null);
1884         int cap = CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING;
1885         if (format == ImageFormat.PRIVATE) {
1886             cap = CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING;
1887         }
1888         return info.isCapabilitySupported(cap);
1889     }
1890 
1891     /**
1892      * Stop the repeating requests of current camera.
1893      * Does _not_ wait for the device to go idle
1894      */
stopRepeating()1895     private void stopRepeating() throws Exception {
1896         // Stop repeat, wait for captures to complete, and disconnect from surfaces
1897         if (mTestRule.getCameraSession() != null) {
1898             if (VERBOSE) Log.v(TAG, "Stopping preview");
1899             mTestRule.getCameraSession().stopRepeating();
1900         }
1901     }
1902 
1903     /**
1904      * Configure reader and preview outputs and wait until done.
1905      *
1906      * @return The preview capture request
1907      */
configureReaderAndPreviewOutputs( String id, boolean isColorOutputSupported)1908     private CaptureRequest configureReaderAndPreviewOutputs(
1909             String id, boolean isColorOutputSupported)
1910             throws Exception {
1911         if (mPreviewSurface == null || mTestRule.getReaderSurface() == null) {
1912             throw new IllegalStateException("preview and reader surface must be initilized first");
1913         }
1914 
1915         // Create previewBuilder
1916         CaptureRequest.Builder previewBuilder =
1917                 mTestRule.getCamera().createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1918         if (isColorOutputSupported) {
1919             previewBuilder.addTarget(mPreviewSurface);
1920         }
1921         previewBuilder.addTarget(mTestRule.getReaderSurface());
1922 
1923 
1924         // Figure out constant target FPS range no larger than 30fps
1925         CameraCharacteristics ch = mTestRule.getStaticInfo().getCharacteristics();
1926         StreamConfigurationMap config =
1927                 ch.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
1928         long minFrameDuration = Math.max(FRAME_DURATION_NS_30FPS,
1929                 config.getOutputMinFrameDuration(mImageReaderFormat, mPreviewSize));
1930 
1931         List<Surface> outputSurfaces = new ArrayList<>();
1932         outputSurfaces.add(mTestRule.getReaderSurface());
1933         if (isColorOutputSupported) {
1934             outputSurfaces.add(mPreviewSurface);
1935             minFrameDuration = Math.max(minFrameDuration,
1936                     config.getOutputMinFrameDuration(SurfaceTexture.class, mPreviewSize));
1937         }
1938         Range<Integer> targetRange =
1939                 CameraTestUtils.getSuitableFpsRangeForDuration(id,
1940                         minFrameDuration, mTestRule.getStaticInfo());
1941         previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
1942 
1943         // Create capture session
1944         boolean useSessionKeys = isFpsRangeASessionKey(ch);
1945         CaptureRequest previewRequest = previewBuilder.build();
1946         mTestRule.setCameraSessionListener(new BlockingSessionCallback());
1947         configureAndSetCameraSession(outputSurfaces, useSessionKeys, previewRequest);
1948 
1949         return previewRequest;
1950     }
1951 
1952     /**
1953      * Configure preview outputs and wait until done.
1954      *
1955      * @return The preview capture request builder
1956      */
configurePreviewOutputs(String id)1957     private CaptureRequest.Builder configurePreviewOutputs(String id)
1958             throws Exception {
1959         if (mPreviewSurface == null) {
1960             throw new IllegalStateException("preview surface must be initialized first");
1961         }
1962 
1963         // Create previewBuilder
1964         CaptureRequest.Builder previewBuilder =
1965                 mTestRule.getCamera().createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1966         previewBuilder.addTarget(mPreviewSurface);
1967 
1968         // Figure out constant target FPS range no larger than 30fps
1969         CameraCharacteristics ch = mTestRule.getStaticInfo().getCharacteristics();
1970         StreamConfigurationMap config =
1971                 ch.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
1972         long minFrameDuration = Math.max(FRAME_DURATION_NS_30FPS,
1973                 config.getOutputMinFrameDuration(SurfaceTexture.class, mPreviewSize));
1974 
1975         List<Surface> outputSurfaces = new ArrayList<>();
1976         outputSurfaces.add(mPreviewSurface);
1977         Range<Integer> targetRange =
1978                 CameraTestUtils.getSuitableFpsRangeForDuration(id,
1979                         minFrameDuration, mTestRule.getStaticInfo());
1980         previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
1981 
1982         // Create capture session
1983         boolean useSessionKeys = isFpsRangeASessionKey(ch);
1984         CaptureRequest previewRequest = previewBuilder.build();
1985         mTestRule.setCameraSessionListener(new BlockingSessionCallback());
1986         configureAndSetCameraSession(outputSurfaces, useSessionKeys, previewRequest);
1987 
1988         return previewBuilder;
1989     }
1990 
1991     /**
1992      * Initialize the ImageReader instance and preview surface.
1993      * @param cameraId The camera to be opened.
1994      * @param format The format used to create ImageReader instance.
1995      */
    private void initializeImageReader(String cameraId, int format) throws Exception {
        // Delegate with no frame-duration bound and zero (default) usage flags.
        initializeImageReader(cameraId, format, null/*maxFrameDuration*/, 0/*usage*/);
    }
1999 
2000     /**
2001      * Initialize the ImageReader instance and preview surface.
2002      * @param cameraId The camera to be opened.
2003      * @param format The format used to create ImageReader instance.
2004      * @param frameDuration The min frame duration of the ImageReader cannot be larger than
2005      *                      frameDuration.
2006      * @param usage The usage of the ImageReader
2007      */
initializeImageReader(String cameraId, int format, Long frameDuration, long usage)2008     private void initializeImageReader(String cameraId, int format, Long frameDuration, long usage)
2009             throws Exception {
2010         List<Size> boundedSizes = CameraTestUtils.getSortedSizesForFormat(
2011                 cameraId, mTestRule.getCameraManager(), format,
2012                 CameraTestUtils.getPreviewSizeBound(mTestRule.getWindowManager(),
2013                         CameraTestUtils.PREVIEW_SIZE_BOUND));
2014 
2015         // Remove the sizes not meeting the frame duration requirement.
2016         final float kFrameDurationTolerance = 0.01f;
2017         if (frameDuration != null) {
2018             StreamConfigurationMap configMap = mTestRule.getStaticInfo().getValueFromKeyNonNull(
2019                     CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
2020             ListIterator<Size> iter = boundedSizes.listIterator();
2021             while (iter.hasNext()) {
2022                 long duration = configMap.getOutputMinFrameDuration(format, iter.next());
2023                 if (duration > frameDuration * (1 + kFrameDurationTolerance)) {
2024                     iter.remove();
2025                 }
2026             }
2027         }
2028 
2029         mTestRule.setOrderedPreviewSizes(boundedSizes);
2030         mPreviewSize = mTestRule.getOrderedPreviewSizes().get(0);
2031         mImageReaderFormat = format;
2032         if (usage != 0) {
2033             mTestRule.createDefaultImageReader(
2034                     mPreviewSize, format, NUM_MAX_IMAGES, usage, /*listener*/null);
2035         } else {
2036             mTestRule.createDefaultImageReader(
2037                     mPreviewSize, format, NUM_MAX_IMAGES, /*listener*/null);
2038         }
2039     }
2040 
simpleOpenCamera(String cameraId)2041     private void simpleOpenCamera(String cameraId) throws Exception {
2042         mTestRule.setCamera(CameraTestUtils.openCamera(
2043                 mTestRule.getCameraManager(), cameraId,
2044                 mTestRule.getCameraListener(), mTestRule.getHandler()));
2045         mTestRule.getCollector().setCameraId(cameraId);
2046         mTestRule.setStaticInfo(new StaticMetadata(
2047                 mTestRule.getCameraManager().getCameraCharacteristics(cameraId),
2048                 CheckLevel.ASSERT, /*collector*/null));
2049     }
2050 
2051     /**
2052      * Simple image listener that can be used to time the availability of first image.
2053      *
2054      */
2055     private static class SimpleImageListener implements ImageReader.OnImageAvailableListener {
2056         private ConditionVariable imageAvailable = new ConditionVariable();
2057         private boolean imageReceived = false;
2058         private long mTimeReceivedImage = 0;
2059 
2060         @Override
onImageAvailable(ImageReader reader)2061         public void onImageAvailable(ImageReader reader) {
2062             Image image = null;
2063             if (!imageReceived) {
2064                 if (VERBOSE) {
2065                     Log.v(TAG, "First image arrives");
2066                 }
2067                 imageReceived = true;
2068                 mTimeReceivedImage = SystemClock.elapsedRealtime();
2069                 imageAvailable.open();
2070             }
2071             image = reader.acquireNextImage();
2072             if (image != null) {
2073                 image.close();
2074             }
2075         }
2076 
2077         /**
2078          * Wait for image available, return immediately if the image was already
2079          * received, otherwise wait until an image arrives.
2080          */
waitForImageAvailable(long timeout)2081         public void waitForImageAvailable(long timeout) {
2082             if (imageReceived) {
2083                 imageReceived = false;
2084                 return;
2085             }
2086 
2087             if (imageAvailable.block(timeout)) {
2088                 imageAvailable.close();
2089                 imageReceived = true;
2090             } else {
2091                 throw new TimeoutRuntimeException("Unable to get the first image after "
2092                         + CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS + "ms");
2093             }
2094         }
2095 
getTimeReceivedImage()2096         public long getTimeReceivedImage() {
2097             return mTimeReceivedImage;
2098         }
2099     }
2100 
2101     /**
2102      * Simple image listener that behaves like a SurfaceView.
2103      */
2104     private static class SimpleTimestampListener
2105             implements ImageReader.OnImageAvailableListener {
2106         public static class TimestampHolder {
2107             public long mDeliveryTime;
2108             public long mTimestamp;
TimestampHolder(long deliveryTime, long timestamp)2109             TimestampHolder(long deliveryTime, long timestamp) {
2110                 mDeliveryTime = deliveryTime;
2111                 mTimestamp = timestamp;
2112             }
2113         }
2114 
2115         private final boolean mUseRealtime;
2116 
2117         private final LinkedBlockingQueue<TimestampHolder> mTimestampQueue =
2118                 new LinkedBlockingQueue<TimestampHolder>();
2119 
SimpleTimestampListener(boolean timestampIsRealtime)2120         SimpleTimestampListener(boolean timestampIsRealtime) {
2121             mUseRealtime = timestampIsRealtime;
2122         }
2123 
2124         @Override
onImageAvailable(ImageReader reader)2125         public void onImageAvailable(ImageReader reader) {
2126             try {
2127                 Image image = null;
2128                 image = reader.acquireNextImage();
2129                 if (image != null) {
2130                     long timestamp = image.getTimestamp();
2131                     long currentTimeMillis = mUseRealtime
2132                             ? SystemClock.elapsedRealtime() : SystemClock.uptimeMillis();
2133                     long currentTimeNs = currentTimeMillis * 1000000;
2134                     mTimestampQueue.put(new TimestampHolder(currentTimeNs, timestamp));
2135                     image.close();
2136                 }
2137             } catch (InterruptedException e) {
2138                 throw new UnsupportedOperationException(
2139                         "Can't handle InterruptedException in onImageAvailable");
2140             }
2141         }
2142 
2143         /**
2144          * Get the number of timestamps
2145          */
getTimestampCount()2146         public int getTimestampCount() {
2147             return mTimestampQueue.size();
2148         }
2149 
2150         /**
2151          * Get the timestamps for next image received.
2152          */
getNextTimestampHolder()2153         public TimestampHolder getNextTimestampHolder() {
2154             TimestampHolder holder = mTimestampQueue.poll();
2155             return holder;
2156         }
2157     }
2158 
2159     private static class SimpleTimingResultListener
2160             extends CameraCaptureSession.CaptureCallback {
2161         private final LinkedBlockingQueue<Pair<CaptureResult, Long> > mPartialResultQueue =
2162                 new LinkedBlockingQueue<Pair<CaptureResult, Long> >();
2163         private final LinkedBlockingQueue<Pair<CaptureResult, Long> > mResultQueue =
2164                 new LinkedBlockingQueue<Pair<CaptureResult, Long> > ();
2165 
2166         @Override
onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result)2167         public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
2168                 TotalCaptureResult result) {
2169             try {
2170                 Long time = SystemClock.elapsedRealtime();
2171                 mResultQueue.put(new Pair<CaptureResult, Long>(result, time));
2172             } catch (InterruptedException e) {
2173                 throw new UnsupportedOperationException(
2174                         "Can't handle InterruptedException in onCaptureCompleted");
2175             }
2176         }
2177 
2178         @Override
onCaptureProgressed(CameraCaptureSession session, CaptureRequest request, CaptureResult partialResult)2179         public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
2180                 CaptureResult partialResult) {
2181             try {
2182                 // check if AE and AF state exists
2183                 Long time = -1L;
2184                 if (partialResult.get(CaptureResult.CONTROL_AE_STATE) != null &&
2185                         partialResult.get(CaptureResult.CONTROL_AF_STATE) != null) {
2186                     time = SystemClock.elapsedRealtime();
2187                 }
2188                 mPartialResultQueue.put(new Pair<CaptureResult, Long>(partialResult, time));
2189             } catch (InterruptedException e) {
2190                 throw new UnsupportedOperationException(
2191                         "Can't handle InterruptedException in onCaptureProgressed");
2192             }
2193         }
2194 
getPartialResultNTime(long timeout)2195         public Pair<CaptureResult, Long> getPartialResultNTime(long timeout) {
2196             try {
2197                 Pair<CaptureResult, Long> result =
2198                         mPartialResultQueue.poll(timeout, TimeUnit.MILLISECONDS);
2199                 return result;
2200             } catch (InterruptedException e) {
2201                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
2202             }
2203         }
2204 
getCaptureResultNTime(long timeout)2205         public Pair<CaptureResult, Long> getCaptureResultNTime(long timeout) {
2206             try {
2207                 Pair<CaptureResult, Long> result =
2208                         mResultQueue.poll(timeout, TimeUnit.MILLISECONDS);
2209                 assertNotNull("Wait for a capture result timed out in " + timeout + "ms", result);
2210                 return result;
2211             } catch (InterruptedException e) {
2212                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
2213             }
2214         }
2215 
getPartialResultNTimeForRequest(CaptureRequest myRequest, int numResultsWait)2216         public Pair<CaptureResult, Long> getPartialResultNTimeForRequest(CaptureRequest myRequest,
2217                 int numResultsWait) {
2218             if (numResultsWait < 0) {
2219                 throw new IllegalArgumentException("numResultsWait must be no less than 0");
2220             }
2221 
2222             Pair<CaptureResult, Long> result;
2223             int i = 0;
2224             do {
2225                 result = getPartialResultNTime(CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
2226                 // The result may be null if no partials are produced on this particular path, so
2227                 // stop trying
2228                 if (result == null) break;
2229                 if (result.first.getRequest().equals(myRequest)) {
2230                     return result;
2231                 }
2232             } while (i++ < numResultsWait);
2233 
2234             // No partials produced - this may not be an error, since a given device may not
2235             // produce any partials on this testing path
2236             return null;
2237         }
2238 
getCaptureResultNTimeForRequest(CaptureRequest myRequest, int numResultsWait)2239         public Pair<CaptureResult, Long> getCaptureResultNTimeForRequest(CaptureRequest myRequest,
2240                 int numResultsWait) {
2241             if (numResultsWait < 0) {
2242                 throw new IllegalArgumentException("numResultsWait must be no less than 0");
2243             }
2244 
2245             Pair<CaptureResult, Long> result;
2246             int i = 0;
2247             do {
2248                 result = getCaptureResultNTime(CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
2249                 if (result.first.getRequest().equals(myRequest)) {
2250                     return result;
2251                 }
2252             } while (i++ < numResultsWait);
2253 
2254             throw new TimeoutRuntimeException("Unable to get the expected capture result after "
2255                     + "waiting for " + numResultsWait + " results");
2256         }
2257 
2258     }
2259 }
2260