1 /*
2  * Copyright 2013 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.cts;
18 
19 import static org.mockito.Mockito.*;
20 
21 import android.graphics.Bitmap;
22 import android.graphics.BitmapFactory;
23 import android.graphics.ColorSpace;
24 import android.graphics.Gainmap;
25 import android.graphics.ImageFormat;
26 import android.graphics.PointF;
27 import android.graphics.Rect;
28 import android.graphics.SurfaceTexture;
29 import android.hardware.camera2.CameraAccessException;
30 import android.hardware.camera2.CameraCaptureSession;
31 import android.hardware.camera2.CameraCharacteristics;
32 import android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession;
33 import android.hardware.camera2.CameraDevice;
34 import android.hardware.camera2.CameraManager;
35 import android.hardware.camera2.CameraMetadata;
36 import android.hardware.camera2.CaptureFailure;
37 import android.hardware.camera2.CaptureRequest;
38 import android.hardware.camera2.CaptureResult;
39 import android.hardware.camera2.MultiResolutionImageReader;
40 import android.hardware.camera2.TotalCaptureResult;
41 import android.hardware.camera2.cts.helpers.CameraErrorCollector;
42 import android.hardware.camera2.cts.helpers.StaticMetadata;
43 import android.hardware.camera2.params.DynamicRangeProfiles;
44 import android.hardware.camera2.params.InputConfiguration;
45 import android.hardware.camera2.params.MandatoryStreamCombination.MandatoryStreamInformation;
46 import android.hardware.camera2.params.MeteringRectangle;
47 import android.hardware.camera2.params.MultiResolutionStreamConfigurationMap;
48 import android.hardware.camera2.params.MultiResolutionStreamInfo;
49 import android.hardware.camera2.params.OutputConfiguration;
50 import android.hardware.camera2.params.SessionConfiguration;
51 import android.hardware.camera2.params.StreamConfigurationMap;
52 import android.hardware.cts.helpers.CameraUtils;
53 import android.location.Location;
54 import android.location.LocationManager;
55 import android.media.ExifInterface;
56 import android.media.Image;
57 import android.media.Image.Plane;
58 import android.media.ImageReader;
59 import android.media.ImageWriter;
60 import android.os.Build;
61 import android.os.ConditionVariable;
62 import android.os.Handler;
63 import android.os.Looper;
64 import android.util.Log;
65 import android.util.Pair;
66 import android.util.Range;
67 import android.util.Size;
68 import android.view.Surface;
69 import android.view.WindowManager;
70 import android.view.WindowMetrics;
71 
72 import androidx.annotation.NonNull;
73 
74 import com.android.ex.camera2.blocking.BlockingCameraManager;
75 import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
76 import com.android.ex.camera2.blocking.BlockingSessionCallback;
77 import com.android.ex.camera2.blocking.BlockingStateCallback;
78 import com.android.ex.camera2.exceptions.TimeoutRuntimeException;
79 
80 import junit.framework.Assert;
81 
82 import org.mockito.ArgumentCaptor;
83 import org.mockito.InOrder;
84 import org.mockito.Mockito;
85 
86 import java.io.FileOutputStream;
87 import java.io.IOException;
88 import java.lang.reflect.Array;
89 import java.nio.ByteBuffer;
90 import java.text.ParseException;
91 import java.text.SimpleDateFormat;
92 import java.util.ArrayList;
93 import java.util.Arrays;
94 import java.util.Collection;
95 import java.util.Collections;
96 import java.util.Comparator;
97 import java.util.Date;
98 import java.util.HashMap;
99 import java.util.HashSet;
100 import java.util.Iterator;
101 import java.util.List;
102 import java.util.Optional;
103 import java.util.Random;
104 import java.util.Set;
105 import java.util.concurrent.Executor;
106 import java.util.concurrent.LinkedBlockingQueue;
107 import java.util.concurrent.Semaphore;
108 import java.util.concurrent.TimeUnit;
109 import java.util.concurrent.atomic.AtomicLong;
110 
111 /**
112  * A utility class that wraps up the common utility functions shared by the camera2 CTS tests
113  */
114 public class CameraTestUtils extends Assert {
115     private static final String TAG = "CameraTestUtils";
116     private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
117     private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
118     public static final Size SIZE_BOUND_720P = new Size(1280, 720);
119     public static final Size SIZE_BOUND_1080P = new Size(1920, 1088);
120     public static final Size SIZE_BOUND_2K = new Size(2048, 1088);
121     public static final Size SIZE_BOUND_QHD = new Size(2560, 1440);
122     public static final Size SIZE_BOUND_2160P = new Size(3840, 2160);
123     // Only test preview sizes that are no larger than 1080p.
124     public static final Size PREVIEW_SIZE_BOUND = SIZE_BOUND_1080P;
125     // Default timeouts for reaching various states
126     public static final int CAMERA_OPEN_TIMEOUT_MS = 3000;
127     public static final int CAMERA_CLOSE_TIMEOUT_MS = 3000;
128     public static final int CAMERA_IDLE_TIMEOUT_MS = 3000;
129     public static final int CAMERA_ACTIVE_TIMEOUT_MS = 1000;
130     public static final int CAMERA_BUSY_TIMEOUT_MS = 1000;
131     public static final int CAMERA_UNCONFIGURED_TIMEOUT_MS = 1000;
132     public static final int CAMERA_CONFIGURE_TIMEOUT_MS = 3000;
133     public static final int CAPTURE_RESULT_TIMEOUT_MS = 3000;
134     public static final int CAPTURE_IMAGE_TIMEOUT_MS = 3000;
135 
136     public static final int SESSION_CONFIGURE_TIMEOUT_MS = 3000;
137     public static final int SESSION_CLOSE_TIMEOUT_MS = 3000;
138     public static final int SESSION_READY_TIMEOUT_MS = 5000;
139     public static final int SESSION_ACTIVE_TIMEOUT_MS = 1000;
140 
141     public static final int MAX_READER_IMAGES = 5;
142 
143     public static final int INDEX_ALGORITHM_AE = 0;
144     public static final int INDEX_ALGORITHM_AWB = 1;
145     public static final int INDEX_ALGORITHM_AF = 2;
146     public static final int NUM_ALGORITHMS = 3; // AE, AWB and AF
147 
148     // Compensate for the loss of "sensitivity" and "sensitivityBoost"
149     public static final int MAX_ISO_MISMATCH = 3;
150 
151     public static final String OFFLINE_CAMERA_ID = "offline_camera_id";
152     public static final String REPORT_LOG_NAME = "CtsCameraTestCases";
153 
154     private static final int EXIF_DATETIME_LENGTH = 19;
155     private static final int EXIF_DATETIME_ERROR_MARGIN_SEC = 60;
156     private static final float EXIF_FOCAL_LENGTH_ERROR_MARGIN = 0.001f;
157     private static final float EXIF_EXPOSURE_TIME_ERROR_MARGIN_RATIO = 0.05f;
158     private static final float EXIF_EXPOSURE_TIME_MIN_ERROR_MARGIN_SEC = 0.002f;
159     private static final float EXIF_APERTURE_ERROR_MARGIN = 0.001f;
160 
161     private static final float ZOOM_RATIO_THRESHOLD = 0.01f;
162 
163     private static final int AVAILABILITY_TIMEOUT_MS = 10;
164 
165     private static final Location sTestLocation0 = new Location(LocationManager.GPS_PROVIDER);
166     private static final Location sTestLocation1 = new Location(LocationManager.GPS_PROVIDER);
167     private static final Location sTestLocation2 = new Location(LocationManager.NETWORK_PROVIDER);
168 
169     static {
170         sTestLocation0.setTime(1199145600000L);
171         sTestLocation0.setLatitude(37.736071);
172         sTestLocation0.setLongitude(-122.441983);
173         sTestLocation0.setAltitude(21.0);
174 
175         sTestLocation1.setTime(1199145601000L);
176         sTestLocation1.setLatitude(0.736071);
177         sTestLocation1.setLongitude(0.441983);
178         sTestLocation1.setAltitude(1.0);
179 
180         sTestLocation2.setTime(1199145602000L);
181         sTestLocation2.setLatitude(-89.736071);
182         sTestLocation2.setLongitude(-179.441983);
183         sTestLocation2.setAltitude(100000.0);
184     }
185 
186     // Exif test data vectors.
187     public static final ExifTestData[] EXIF_TEST_DATA = {
188             new ExifTestData(
189                     /*gpsLocation*/ sTestLocation0,
190                     /* orientation */90,
191                     /* jpgQuality */(byte) 80,
192                     /* thumbQuality */(byte) 75),
193             new ExifTestData(
194                     /*gpsLocation*/ sTestLocation1,
195                     /* orientation */180,
196                     /* jpgQuality */(byte) 90,
197                     /* thumbQuality */(byte) 85),
198             new ExifTestData(
199                     /*gpsLocation*/ sTestLocation2,
200                     /* orientation */270,
201                     /* jpgQuality */(byte) 100,
202                     /* thumbQuality */(byte) 80)
203     };
204 
205     /**
206      * Create an {@link android.media.ImageReader} object and hook up the image available listener.
207      *
208      * @param size The size of this ImageReader to be created.
209      * @param format The format of this ImageReader to be created.
210      * @param maxNumImages The max number of images that can be acquired simultaneously.
211      * @param listener The listener used by this ImageReader to notify callbacks.
212      * @param handler The handler to use for any listener callbacks.
213      */
214     public static ImageReader makeImageReader(Size size, int format, int maxNumImages,
215             ImageReader.OnImageAvailableListener listener, Handler handler) {
216         ImageReader reader;
217         reader = ImageReader.newInstance(size.getWidth(), size.getHeight(), format,
218                 maxNumImages);
219         reader.setOnImageAvailableListener(listener, handler);
220         if (VERBOSE) Log.v(TAG, "Created ImageReader size " + size);
221         return reader;
222     }
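
    // Example usage (sketch): capture a single JPEG frame through an ImageReader created by
    // makeImageReader(). The size and format values and the `handler` variable below are
    // illustrative assumptions of this example, not values required by the helper.
    //
    //     SimpleImageReaderListener readerListener = new SimpleImageReaderListener();
    //     ImageReader reader = CameraTestUtils.makeImageReader(
    //             new Size(640, 480), ImageFormat.JPEG, MAX_READER_IMAGES,
    //             readerListener, handler);
    //     // ... submit a capture request targeting reader.getSurface() ...
    //     Image image = readerListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
    //     image.close();
    //     CameraTestUtils.closeImageReader(reader);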
223 
224     /**
225      * Create an ImageWriter and hook up the ImageListener.
226      *
227      * @param inputSurface The input surface of the ImageWriter.
228      * @param maxImages The max number of Images that can be dequeued simultaneously.
229      * @param listener The listener used by this ImageWriter to notify callbacks.
230      * @param handler The handler to post listener callbacks.
231      * @return ImageWriter object created.
232      */
233     public static ImageWriter makeImageWriter(
234             Surface inputSurface, int maxImages,
235             ImageWriter.OnImageReleasedListener listener, Handler handler) {
236         ImageWriter writer = ImageWriter.newInstance(inputSurface, maxImages);
237         writer.setOnImageReleasedListener(listener, handler);
238         return writer;
239     }
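
    // Example usage (sketch): feed a reprocess input buffer through an ImageWriter created by
    // makeImageWriter(). The `inputSurface`, `releasedListener`, `inputImage`, and `handler`
    // variables are assumed to exist in the calling test.
    //
    //     ImageWriter writer = CameraTestUtils.makeImageWriter(
    //             inputSurface, /*maxImages*/ 2, releasedListener, handler);
    //     writer.queueInputImage(inputImage);
    //     // ... wait for the camera to consume and release the input buffer ...
    //     CameraTestUtils.closeImageWriter(writer);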
240 
241     /**
242      * Utility class to store the targets for mandatory stream combination test.
243      */
244     public static class StreamCombinationTargets {
245         public List<SurfaceTexture> mPrivTargets = new ArrayList<>();
246         public List<ImageReader> mJpegTargets = new ArrayList<>();
247         public List<ImageReader> mYuvTargets = new ArrayList<>();
248         public List<ImageReader> mY8Targets = new ArrayList<>();
249         public List<ImageReader> mRawTargets = new ArrayList<>();
250         public List<ImageReader> mHeicTargets = new ArrayList<>();
251         public List<ImageReader> mDepth16Targets = new ArrayList<>();
252         public List<ImageReader> mP010Targets = new ArrayList<>();
253 
254 
255         public List<MultiResolutionImageReader> mPrivMultiResTargets = new ArrayList<>();
256         public List<MultiResolutionImageReader> mJpegMultiResTargets = new ArrayList<>();
257         public List<MultiResolutionImageReader> mYuvMultiResTargets = new ArrayList<>();
258         public List<MultiResolutionImageReader> mRawMultiResTargets = new ArrayList<>();
259 
260         public void close() {
261             for (SurfaceTexture target : mPrivTargets) {
262                 target.release();
263             }
264             for (ImageReader target : mJpegTargets) {
265                 target.close();
266             }
267             for (ImageReader target : mYuvTargets) {
268                 target.close();
269             }
270             for (ImageReader target : mY8Targets) {
271                 target.close();
272             }
273             for (ImageReader target : mRawTargets) {
274                 target.close();
275             }
276             for (ImageReader target : mHeicTargets) {
277                 target.close();
278             }
279             for (ImageReader target : mDepth16Targets) {
280                 target.close();
281             }
282             for (ImageReader target : mP010Targets) {
283                 target.close();
284             }
285 
286             for (MultiResolutionImageReader target : mPrivMultiResTargets) {
287                 target.close();
288             }
289             for (MultiResolutionImageReader target : mJpegMultiResTargets) {
290                 target.close();
291             }
292             for (MultiResolutionImageReader target : mYuvMultiResTargets) {
293                 target.close();
294             }
295             for (MultiResolutionImageReader target : mRawMultiResTargets) {
296                 target.close();
297             }
298         }
299     }
300 
301     private static void configureTarget(StreamCombinationTargets targets,
302             List<OutputConfiguration> outputConfigs, List<Surface> outputSurfaces,
303             int format, Size targetSize, int numBuffers, String overridePhysicalCameraId,
304             MultiResolutionStreamConfigurationMap multiResStreamConfig,
305             boolean createMultiResiStreamConfig, ImageDropperListener listener, Handler handler,
306             long dynamicRangeProfile, long streamUseCase) {
307         if (createMultiResiStreamConfig) {
308             Collection<MultiResolutionStreamInfo> multiResolutionStreams =
309                     multiResStreamConfig.getOutputInfo(format);
310             MultiResolutionImageReader multiResReader = new MultiResolutionImageReader(
311                     multiResolutionStreams, format, numBuffers);
312             multiResReader.setOnImageAvailableListener(listener, new HandlerExecutor(handler));
313             Collection<OutputConfiguration> configs =
314                     OutputConfiguration.createInstancesForMultiResolutionOutput(multiResReader);
315             outputConfigs.addAll(configs);
316             outputSurfaces.add(multiResReader.getSurface());
317             switch (format) {
318                 case ImageFormat.PRIVATE:
319                     targets.mPrivMultiResTargets.add(multiResReader);
320                     break;
321                 case ImageFormat.JPEG:
322                     targets.mJpegMultiResTargets.add(multiResReader);
323                     break;
324                 case ImageFormat.YUV_420_888:
325                     targets.mYuvMultiResTargets.add(multiResReader);
326                     break;
327                 case ImageFormat.RAW_SENSOR:
328                     targets.mRawMultiResTargets.add(multiResReader);
329                     break;
330                 default:
331                     fail("Unknown/Unsupported output format " + format);
332             }
333         } else {
334             if (format == ImageFormat.PRIVATE) {
335                 SurfaceTexture target = new SurfaceTexture(/*random int*/1);
336                 target.setDefaultBufferSize(targetSize.getWidth(), targetSize.getHeight());
337                 OutputConfiguration config = new OutputConfiguration(new Surface(target));
338                 if (overridePhysicalCameraId != null) {
339                     config.setPhysicalCameraId(overridePhysicalCameraId);
340                 }
341                 config.setDynamicRangeProfile(dynamicRangeProfile);
342                 config.setStreamUseCase(streamUseCase);
343                 outputConfigs.add(config);
344                 outputSurfaces.add(config.getSurface());
345                 targets.mPrivTargets.add(target);
346             } else {
347                 ImageReader target = ImageReader.newInstance(targetSize.getWidth(),
348                         targetSize.getHeight(), format, numBuffers);
349                 target.setOnImageAvailableListener(listener, handler);
350                 OutputConfiguration config = new OutputConfiguration(target.getSurface());
351                 if (overridePhysicalCameraId != null) {
352                     config.setPhysicalCameraId(overridePhysicalCameraId);
353                 }
354                 config.setDynamicRangeProfile(dynamicRangeProfile);
355                 config.setStreamUseCase(streamUseCase);
356                 outputConfigs.add(config);
357                 outputSurfaces.add(config.getSurface());
358 
359                 switch (format) {
360                     case ImageFormat.JPEG:
361                       targets.mJpegTargets.add(target);
362                       break;
363                     case ImageFormat.YUV_420_888:
364                       targets.mYuvTargets.add(target);
365                       break;
366                     case ImageFormat.Y8:
367                       targets.mY8Targets.add(target);
368                       break;
369                     case ImageFormat.RAW_SENSOR:
370                       targets.mRawTargets.add(target);
371                       break;
372                     case ImageFormat.HEIC:
373                       targets.mHeicTargets.add(target);
374                       break;
375                     case ImageFormat.DEPTH16:
376                       targets.mDepth16Targets.add(target);
377                       break;
378                     case ImageFormat.YCBCR_P010:
379                       targets.mP010Targets.add(target);
380                       break;
381                     default:
382                       fail("Unknown/Unsupported output format " + format);
383                 }
384             }
385         }
386     }
387 
388     public static void setupConfigurationTargets(List<MandatoryStreamInformation> streamsInfo,
389             StreamCombinationTargets targets,
390             List<OutputConfiguration> outputConfigs,
391             List<Surface> outputSurfaces, int numBuffers,
392             boolean substituteY8, boolean substituteHeic, String overridenPhysicalCameraId,
393             MultiResolutionStreamConfigurationMap multiResStreamConfig, Handler handler) {
394         List<Surface> uhSurfaces = new ArrayList<Surface>();
395         setupConfigurationTargets(streamsInfo, targets, outputConfigs, outputSurfaces, uhSurfaces,
396             numBuffers, substituteY8, substituteHeic, overridenPhysicalCameraId,
397             multiResStreamConfig, handler);
398     }
399 
400     public static void setupConfigurationTargets(List<MandatoryStreamInformation> streamsInfo,
401             StreamCombinationTargets targets,
402             List<OutputConfiguration> outputConfigs,
403             List<Surface> outputSurfaces, List<Surface> uhSurfaces, int numBuffers,
404             boolean substituteY8, boolean substituteHeic, String overridePhysicalCameraId,
405             MultiResolutionStreamConfigurationMap multiResStreamConfig, Handler handler) {
406         setupConfigurationTargets(streamsInfo, targets, outputConfigs, outputSurfaces, uhSurfaces,
407                 numBuffers, substituteY8, substituteHeic, overridePhysicalCameraId,
408                 multiResStreamConfig, handler, /*dynamicRangeProfiles*/ null);
409     }
410 
411     public static void setupConfigurationTargets(List<MandatoryStreamInformation> streamsInfo,
412             StreamCombinationTargets targets,
413             List<OutputConfiguration> outputConfigs,
414             List<Surface> outputSurfaces, List<Surface> uhSurfaces, int numBuffers,
415             boolean substituteY8, boolean substituteHeic, String overridePhysicalCameraId,
416             MultiResolutionStreamConfigurationMap multiResStreamConfig, Handler handler,
417             List<Long> dynamicRangeProfiles) {
418 
419         Random rnd = new Random();
420         // 10-bit capable output streams use a fixed dynamic range profile when
421         // dynamicRangeProfiles.size() == 1, and a randomly chosen one when the size is > 1.
422         boolean use10BitRandomProfile = (dynamicRangeProfiles != null) &&
423                 (dynamicRangeProfiles.size() > 1);
424         if (use10BitRandomProfile) {
425             Long seed = rnd.nextLong();
426             Log.i(TAG, "Random seed used for selecting 10-bit output: " + seed);
427             rnd.setSeed(seed);
428         }
429         ImageDropperListener imageDropperListener = new ImageDropperListener();
430         List<Surface> chosenSurfaces;
431         for (MandatoryStreamInformation streamInfo : streamsInfo) {
432             if (streamInfo.isInput()) {
433                 continue;
434             }
435             chosenSurfaces = outputSurfaces;
436             if (streamInfo.isUltraHighResolution()) {
437                 chosenSurfaces = uhSurfaces;
438             }
439             int format = streamInfo.getFormat();
440             if (substituteY8 && (format == ImageFormat.YUV_420_888)) {
441                 format = ImageFormat.Y8;
442             } else if (substituteHeic && (format == ImageFormat.JPEG)) {
443                 format = ImageFormat.HEIC;
444             }
445 
446             long dynamicRangeProfile = DynamicRangeProfiles.STANDARD;
447             if (streamInfo.is10BitCapable() && use10BitRandomProfile) {
448                 boolean override10bit = rnd.nextBoolean();
449                 if (!override10bit) {
450                     dynamicRangeProfile = dynamicRangeProfiles.get(rnd.nextInt(
451                             dynamicRangeProfiles.size()));
452                     format = streamInfo.get10BitFormat();
453                 }
454             } else if (streamInfo.is10BitCapable() && (dynamicRangeProfiles != null)) {
455                 dynamicRangeProfile = dynamicRangeProfiles.get(0);
456                 format = streamInfo.get10BitFormat();
457             }
458             Size[] availableSizes = new Size[streamInfo.getAvailableSizes().size()];
459             availableSizes = streamInfo.getAvailableSizes().toArray(availableSizes);
460             Size targetSize = CameraTestUtils.getMaxSize(availableSizes);
461             boolean createMultiResReader =
462                     (multiResStreamConfig != null &&
463                      !multiResStreamConfig.getOutputInfo(format).isEmpty() &&
464                      streamInfo.isMaximumSize());
465             switch (format) {
466                 case ImageFormat.PRIVATE:
467                 case ImageFormat.JPEG:
468                 case ImageFormat.YUV_420_888:
469                 case ImageFormat.YCBCR_P010:
470                 case ImageFormat.Y8:
471                 case ImageFormat.HEIC:
472                 case ImageFormat.DEPTH16:
473                 {
474                     configureTarget(targets, outputConfigs, chosenSurfaces, format,
475                             targetSize, numBuffers, overridePhysicalCameraId, multiResStreamConfig,
476                             createMultiResReader, imageDropperListener, handler,
477                             dynamicRangeProfile, streamInfo.getStreamUseCase());
478                     break;
479                 }
480                 case ImageFormat.RAW_SENSOR: {
481                     // targetSize could be null in the logical camera case where only
482                     // a physical camera supports the RAW stream.
483                     if (targetSize != null) {
484                         configureTarget(targets, outputConfigs, chosenSurfaces, format,
485                                 targetSize, numBuffers, overridePhysicalCameraId,
486                                 multiResStreamConfig, createMultiResReader, imageDropperListener,
487                                 handler, dynamicRangeProfile, streamInfo.getStreamUseCase());
488                     }
489                     break;
490                 }
491                 default:
492                     fail("Unknown output format " + format);
493             }
494         }
495     }
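
    // Example usage (sketch): build the output targets for a mandatory stream combination and
    // hand the resulting OutputConfigurations to a session. The `mandatoryStreamInfos` and
    // `mHandler` variables are assumed to be provided by the calling test.
    //
    //     StreamCombinationTargets targets = new StreamCombinationTargets();
    //     List<OutputConfiguration> outputConfigs = new ArrayList<>();
    //     List<Surface> outputSurfaces = new ArrayList<>();
    //     setupConfigurationTargets(mandatoryStreamInfos, targets, outputConfigs, outputSurfaces,
    //             MAX_READER_IMAGES, /*substituteY8*/ false, /*substituteHeic*/ false,
    //             /*overridenPhysicalCameraId*/ null, /*multiResStreamConfig*/ null, mHandler);
    //     // ... create a capture session from outputConfigs and run the captures ...
    //     targets.close();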
496 
497     /**
498      * Close pending images and clean up an {@link android.media.ImageReader} object.
499      * @param reader an {@link android.media.ImageReader} to close.
500      */
501     public static void closeImageReader(ImageReader reader) {
502         if (reader != null) {
503             reader.close();
504         }
505     }
506 
507     /**
508      * Close the pending images, then close the currently active {@link ImageReader} objects.
509      */
510     public static void closeImageReaders(ImageReader[] readers) {
511         if ((readers != null) && (readers.length > 0)) {
512             for (ImageReader reader : readers) {
513                 CameraTestUtils.closeImageReader(reader);
514             }
515         }
516     }
517 
518     /**
519      * Close pending images and clean up an {@link android.media.ImageWriter} object.
520      * @param writer an {@link android.media.ImageWriter} to close.
521      */
522     public static void closeImageWriter(ImageWriter writer) {
523         if (writer != null) {
524             writer.close();
525         }
526     }
527 
528     /**
529      * Dummy listener that releases the image immediately once it is available.
530      *
531      * <p>
532      * It can be used for the case where we don't care about the image data at all.
533      * </p>
534      */
535     public static class ImageDropperListener implements ImageReader.OnImageAvailableListener {
536         @Override
537         public synchronized void onImageAvailable(ImageReader reader) {
538             Image image = null;
539             try {
540                 image = reader.acquireNextImage();
541             } finally {
542                 if (image != null) {
543                     image.close();
544                     mImagesDropped++;
545                 }
546             }
547         }
548 
549         public synchronized int getImageCount() {
550             return mImagesDropped;
551         }
552 
553         public synchronized void resetImageCount() {
554             mImagesDropped = 0;
555         }
556 
557         private int mImagesDropped = 0;
558     }
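
    // Example usage (sketch): attach an ImageDropperListener to a stream whose image contents
    // are irrelevant to the test, such as a preview YUV stream. The `previewSize` and `mHandler`
    // variables are assumptions of this example.
    //
    //     ImageDropperListener dropper = new ImageDropperListener();
    //     ImageReader previewReader = makeImageReader(
    //             previewSize, ImageFormat.YUV_420_888, MAX_READER_IMAGES, dropper, mHandler);
    //     // ... stream to previewReader.getSurface(); every frame is closed immediately ...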
559 
560     /**
561      * Image listener that releases the image immediately after validating it
562      */
563     public static class ImageVerifierListener implements ImageReader.OnImageAvailableListener {
564         private Size mSize;
565         private int mFormat;
566         // Whether the parent ImageReader is valid or not. If the parent ImageReader
567         // is destroyed, the acquired Image may become invalid.
568         private boolean mReaderIsValid;
569 
570         public ImageVerifierListener(Size sz, int format) {
571             mSize = sz;
572             mFormat = format;
573             mReaderIsValid = true;
574         }
575 
576         public synchronized void onReaderDestroyed() {
577             mReaderIsValid = false;
578         }
579 
580         @Override
581         public synchronized void onImageAvailable(ImageReader reader) {
582             Image image = null;
583             try {
584                 image = reader.acquireNextImage();
585             } finally {
586                 if (image != null) {
587                     // Should only do some quick validity checks in callback, as the ImageReader
588                     // could be closed asynchronously, which will close all images acquired from
589                     // this ImageReader.
590                     checkImage(image, mSize.getWidth(), mSize.getHeight(), mFormat);
591                     // checkAndroidImageFormat calls into underlying Image object, which could
592                     // become invalid if the ImageReader is destroyed.
593                     if (mReaderIsValid) {
594                         checkAndroidImageFormat(image);
595                     }
596                     image.close();
597                 }
598             }
599         }
600     }
601 
602     public static class SimpleImageReaderListener
603             implements ImageReader.OnImageAvailableListener {
604         private final LinkedBlockingQueue<Image> mQueue =
605                 new LinkedBlockingQueue<Image>();
606         // Indicates whether this listener will drop images or not
607         // once the number of queued images reaches the reader's maxImages
608         private final boolean mAsyncMode;
609         // maxImages held by the queue in async mode.
610         private final int mMaxImages;
611 
612         /**
613          * Create a synchronous SimpleImageReaderListener that queues the images
614          * automatically when they are available; no image will be dropped. If
615          * the caller doesn't call getImage(), the producer will eventually run
616          * into buffer starvation.
617          */
618         public SimpleImageReaderListener() {
619             mAsyncMode = false;
620             mMaxImages = 0;
621         }
622 
623         /**
624          * Create a synchronous/asynchronous SimpleImageReaderListener that
625          * queues the images automatically when they are available. For an
626          * asynchronous listener, images will be dropped once the number of queued
627          * images reaches maxImages. If the caller doesn't call getImage(), the
628          * producer will not be blocked. For a synchronous listener, no image will
629          * be dropped. If the caller doesn't call getImage(), the producer will
630          * eventually run into buffer starvation.
631          *
632          * @param asyncMode Whether the listener operates in asynchronous mode.
633          * @param maxImages The max number of images held by this listener.
634          */
639         public SimpleImageReaderListener(boolean asyncMode, int maxImages) {
640             mAsyncMode = asyncMode;
641             mMaxImages = maxImages;
642         }
643 
644         @Override
645         public void onImageAvailable(ImageReader reader) {
646             try {
647                 Image image = reader.acquireNextImage();
648                 if (image == null) {
649                     return;
650                 }
651                 mQueue.put(image);
652                 if (mAsyncMode && mQueue.size() >= mMaxImages) {
653                     Image img = mQueue.poll();
654                     img.close();
655                 }
656             } catch (InterruptedException e) {
657                 throw new UnsupportedOperationException(
658                         "Can't handle InterruptedException in onImageAvailable");
659             }
660         }
661 
662         /**
663          * Get an image from the image reader.
664          *
665          * @param timeout Timeout value for the wait, in milliseconds.
666          * @return The image from the image reader.
667          */
668         public Image getImage(long timeout) throws InterruptedException {
669             Image image = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
670             assertNotNull("Wait for an image timed out in " + timeout + "ms", image);
671             return image;
672         }
673 
674         /**
675          * Drain the pending images currently held by this listener.
676          *
677          */
678         public void drain() {
679             while (!mQueue.isEmpty()) {
680                 Image image = mQueue.poll();
681                 assertNotNull("Unable to get an image", image);
682                 image.close();
683             }
684         }
685     }
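
    // Example usage (sketch): wait synchronously for a still capture to arrive. The `jpegSize`,
    // `session`, `stillRequest` (targeting the reader's surface), and `mHandler` variables are
    // assumed to exist in the calling test.
    //
    //     SimpleImageReaderListener readerListener = new SimpleImageReaderListener();
    //     ImageReader jpegReader = makeImageReader(
    //             jpegSize, ImageFormat.JPEG, MAX_READER_IMAGES, readerListener, mHandler);
    //     session.capture(stillRequest, /*callback*/ null, mHandler);
    //     Image jpeg = readerListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
    //     jpeg.close();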
686 
687     public static class SimpleImageWriterListener implements ImageWriter.OnImageReleasedListener {
688         private final Semaphore mImageReleasedSema = new Semaphore(0);
689         private final ImageWriter mWriter;
690         @Override
691         public void onImageReleased(ImageWriter writer) {
692             if (writer != mWriter) {
693                 return;
694             }
695 
696             if (VERBOSE) {
697                 Log.v(TAG, "Input image is released");
698             }
699             mImageReleasedSema.release();
700         }
701 
702         public SimpleImageWriterListener(ImageWriter writer) {
703             if (writer == null) {
704                 throw new IllegalArgumentException("writer cannot be null");
705             }
706             mWriter = writer;
707         }
708 
709         public void waitForImageReleased(long timeoutMs) throws InterruptedException {
710             if (!mImageReleasedSema.tryAcquire(timeoutMs, TimeUnit.MILLISECONDS)) {
711                 fail("wait for image available timed out after " + timeoutMs + "ms");
712             }
713         }
714     }
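
    // Example usage (sketch): block until the camera has consumed a queued reprocess input.
    // The `session` (assumed to be a reprocessable capture session), `inputImage`, and `mHandler`
    // variables are assumptions of this example.
    //
    //     ImageWriter writer = ImageWriter.newInstance(session.getInputSurface(), /*maxImages*/ 2);
    //     SimpleImageWriterListener writerListener = new SimpleImageWriterListener(writer);
    //     writer.setOnImageReleasedListener(writerListener, mHandler);
    //     writer.queueInputImage(inputImage);
    //     writerListener.waitForImageReleased(CAPTURE_IMAGE_TIMEOUT_MS);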
715 
716     public static class ImageAndMultiResStreamInfo {
717         public final Image image;
718         public final MultiResolutionStreamInfo streamInfo;
719 
720         public ImageAndMultiResStreamInfo(Image image, MultiResolutionStreamInfo streamInfo) {
721             this.image = image;
722             this.streamInfo = streamInfo;
723         }
724     }
725 
726     public static class SimpleMultiResolutionImageReaderListener
727             implements ImageReader.OnImageAvailableListener {
728         public SimpleMultiResolutionImageReaderListener(MultiResolutionImageReader owner,
729                 int maxBuffers, boolean acquireLatest) {
730             mOwner = owner;
731             mMaxBuffers = maxBuffers;
732             mAcquireLatest = acquireLatest;
733         }
734 
735         @Override
736         public void onImageAvailable(ImageReader reader) {
737             if (VERBOSE) Log.v(TAG, "new image available from reader " + reader.toString());
738 
739             if (mAcquireLatest) {
740                 synchronized (mLock) {
741                     // If there is a switch of image readers, acquire and release all images
742                     // from the previous image reader.
743                     if (mLastReader != reader) {
744                         if (mLastReader != null) {
745                             Image image = mLastReader.acquireLatestImage();
746                             if (image != null) {
747                                 image.close();
748                             }
749                         }
750                         mLastReader = reader;
751                     }
752                 }
753                 mImageAvailable.open();
754             } else {
755                 if (mQueue.size() < mMaxBuffers) {
756                     Image image = reader.acquireNextImage();
757                     MultiResolutionStreamInfo multiResStreamInfo =
758                             mOwner.getStreamInfoForImageReader(reader);
759                     mQueue.offer(new ImageAndMultiResStreamInfo(image, multiResStreamInfo));
760                 }
761             }
762         }
763 
764         public ImageAndMultiResStreamInfo getAnyImageAndInfoAvailable(long timeoutMs)
765                 throws Exception {
766             if (mAcquireLatest) {
767                 Image image = null;
768                 if (mImageAvailable.block(timeoutMs)) {
769                     synchronized (mLock) {
770                         if (mLastReader != null) {
771                             image = mLastReader.acquireLatestImage();
772                             if (VERBOSE) Log.v(TAG, "acquireLatestImage from "
773                                     + mLastReader.toString() + " produces " + image);
774                         } else {
775                             fail("invalid image reader");
776                         }
777                     }
778                     mImageAvailable.close();
779                 } else {
780                     fail("wait for image available timed out after " + timeoutMs + "ms");
781                 }
782                 return image == null ? null : new ImageAndMultiResStreamInfo(image,
783                         mOwner.getStreamInfoForImageReader(mLastReader));
784             } else {
785                 ImageAndMultiResStreamInfo imageAndInfo = mQueue.poll(timeoutMs,
786                         java.util.concurrent.TimeUnit.MILLISECONDS);
787                 if (imageAndInfo == null) {
788                     fail("wait for image available timed out after " + timeoutMs + "ms");
789                 }
790                 return imageAndInfo;
791             }
792         }
793 
794         public void reset() {
795             while (!mQueue.isEmpty()) {
796                 ImageAndMultiResStreamInfo imageAndInfo = mQueue.poll();
797                 assertNotNull("Acquired image is not valid", imageAndInfo.image);
798                 imageAndInfo.image.close();
799             }
800             mImageAvailable.close();
801             mLastReader = null;
802         }
803 
804         private LinkedBlockingQueue<ImageAndMultiResStreamInfo> mQueue =
805                 new LinkedBlockingQueue<ImageAndMultiResStreamInfo>();
806         private final MultiResolutionImageReader mOwner;
807         private final int mMaxBuffers;
808         private final boolean mAcquireLatest;
809         private ConditionVariable mImageAvailable = new ConditionVariable();
810         private ImageReader mLastReader = null;
811         private final Object mLock = new Object();
812     }
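
    // Example usage (sketch): pull images and their stream info out of a
    // MultiResolutionImageReader. The `multiResStreams` collection (of MultiResolutionStreamInfo)
    // and the `mHandler` variable are assumptions of this example.
    //
    //     MultiResolutionImageReader multiResReader = new MultiResolutionImageReader(
    //             multiResStreams, ImageFormat.JPEG, MAX_READER_IMAGES);
    //     SimpleMultiResolutionImageReaderListener listener =
    //             new SimpleMultiResolutionImageReaderListener(
    //                     multiResReader, MAX_READER_IMAGES, /*acquireLatest*/ false);
    //     multiResReader.setOnImageAvailableListener(listener, new HandlerExecutor(mHandler));
    //     ImageAndMultiResStreamInfo imageAndInfo =
    //             listener.getAnyImageAndInfoAvailable(CAPTURE_IMAGE_TIMEOUT_MS);
    //     imageAndInfo.image.close();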
813 
814     public static class SimpleCaptureCallback extends CameraCaptureSession.CaptureCallback {
815         private final LinkedBlockingQueue<TotalCaptureResult> mQueue =
816                 new LinkedBlockingQueue<TotalCaptureResult>();
817         private final LinkedBlockingQueue<CaptureFailure> mFailureQueue =
818                 new LinkedBlockingQueue<>();
819         // (Surface, frame number) pairs for lost buffers
820         private final LinkedBlockingQueue<Pair<Surface, Long>> mBufferLostQueue =
821                 new LinkedBlockingQueue<>();
822         private final LinkedBlockingQueue<Integer> mAbortQueue =
823                 new LinkedBlockingQueue<>();
824         // Pair<CaptureRequest, Long> is a pair of capture request and timestamp.
825         private final LinkedBlockingQueue<Pair<CaptureRequest, Long>> mCaptureStartQueue =
826                 new LinkedBlockingQueue<>();
827         // Pair<Integer, Long> is a pair of sequence id and frame number
828         private final LinkedBlockingQueue<Pair<Integer, Long>> mCaptureSequenceCompletedQueue =
829                 new LinkedBlockingQueue<>();
830 
831         private AtomicLong mNumFramesArrived = new AtomicLong(0);
832 
833         @Override
834         public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
835                 long timestamp, long frameNumber) {
836             try {
837                 mCaptureStartQueue.put(new Pair(request, timestamp));
838             } catch (InterruptedException e) {
839                 throw new UnsupportedOperationException(
840                         "Can't handle InterruptedException in onCaptureStarted");
841             }
842         }
843 
844         @Override
845         public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
846                 TotalCaptureResult result) {
847             try {
848                 mNumFramesArrived.incrementAndGet();
849                 mQueue.put(result);
850             } catch (InterruptedException e) {
851                 throw new UnsupportedOperationException(
852                         "Can't handle InterruptedException in onCaptureCompleted");
853             }
854         }
855 
856         @Override
857         public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
858                 CaptureFailure failure) {
859             try {
860                 mFailureQueue.put(failure);
861             } catch (InterruptedException e) {
862                 throw new UnsupportedOperationException(
863                         "Can't handle InterruptedException in onCaptureFailed");
864             }
865         }
866 
867         @Override
868         public void onCaptureSequenceAborted(CameraCaptureSession session, int sequenceId) {
869             try {
870                 mAbortQueue.put(sequenceId);
871             } catch (InterruptedException e) {
872                 throw new UnsupportedOperationException(
873                         "Can't handle InterruptedException in onCaptureSequenceAborted");
874             }
875         }
876 
877         @Override
878         public void onCaptureSequenceCompleted(CameraCaptureSession session, int sequenceId,
879                 long frameNumber) {
880             try {
881                 mCaptureSequenceCompletedQueue.put(new Pair(sequenceId, frameNumber));
882             } catch (InterruptedException e) {
883                 throw new UnsupportedOperationException(
884                         "Can't handle InterruptedException in onCaptureSequenceCompleted");
885             }
886         }
887 
888         @Override
889         public void onCaptureBufferLost(CameraCaptureSession session,
890                 CaptureRequest request, Surface target, long frameNumber) {
891             try {
892                 mBufferLostQueue.put(new Pair<>(target, frameNumber));
893             } catch (InterruptedException e) {
894                 throw new UnsupportedOperationException(
895                         "Can't handle InterruptedException in onCaptureBufferLost");
896             }
897         }
898 
899         public long getTotalNumFrames() {
900             return mNumFramesArrived.get();
901         }
902 
903         public CaptureResult getCaptureResult(long timeout) {
904             return getTotalCaptureResult(timeout);
905         }
906 
907         public TotalCaptureResult getCaptureResult(long timeout, long timestamp) {
908             try {
909                 long currentTs = -1L;
910                 TotalCaptureResult result;
911                 while (true) {
912                     result = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
913                     if (result == null) {
914                         throw new RuntimeException(
915                                 "Wait for a capture result timed out in " + timeout + "ms");
916                     }
917                     currentTs = result.get(CaptureResult.SENSOR_TIMESTAMP);
918                     if (currentTs == timestamp) {
919                         return result;
920                     }
921                 }
922 
923             } catch (InterruptedException e) {
924                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
925             }
926         }
927 
928         public TotalCaptureResult getTotalCaptureResult(long timeout) {
929             try {
930                 TotalCaptureResult result = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
931                 assertNotNull("Wait for a capture result timed out in " + timeout + "ms", result);
932                 return result;
933             } catch (InterruptedException e) {
934                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
935             }
936         }
937 
938         /**
939          * Get the {@link #CaptureResult capture result} for a given
940          * {@link #CaptureRequest capture request}.
941          *
942          * @param myRequest The {@link #CaptureRequest capture request} whose
943          *            corresponding {@link #CaptureResult capture result} was
944          *            being waited for
945          * @param numResultsWait Number of frames to wait for the capture result
946          *            before timeout.
947          * @throws TimeoutRuntimeException If more than numResultsWait results are
948          *            seen before the result matching myRequest arrives, or each
949          *            individual wait for result times out after
950          *            {@value #CAPTURE_RESULT_TIMEOUT_MS}ms.
951          */
952         public CaptureResult getCaptureResultForRequest(CaptureRequest myRequest,
953                 int numResultsWait) {
954             return getTotalCaptureResultForRequest(myRequest, numResultsWait);
955         }
956 
957         /**
958          * Get the {@link #TotalCaptureResult total capture result} for a given
959          * {@link #CaptureRequest capture request}.
960          *
961          * @param myRequest The {@link #CaptureRequest capture request} whose
962          *            corresponding {@link #TotalCaptureResult capture result} was
963          *            being waited for
964          * @param numResultsWait Number of frames to wait for the capture result
965          *            before timeout.
966          * @throws TimeoutRuntimeException If more than numResultsWait results are
967          *            seen before the result matching myRequest arrives, or each
968          *            individual wait for result times out after
969          *            {@value #CAPTURE_RESULT_TIMEOUT_MS}ms.
970          */
971         public TotalCaptureResult getTotalCaptureResultForRequest(CaptureRequest myRequest,
972                 int numResultsWait) {
973             return getTotalCaptureResultForRequest(myRequest, numResultsWait,
974                     CAPTURE_RESULT_TIMEOUT_MS);
975         }
976 
977         /**
978          * Get the {@link #TotalCaptureResult total capture result} for a given
979          * {@link #CaptureRequest capture request}.
980          *
981          * @param myRequest The {@link #CaptureRequest capture request} whose
982          *            corresponding {@link #TotalCaptureResult capture result} was
983          *            being waited for
984          * @param numResultsWait Number of frames to wait for the capture result
985          *            before timeout.
986          * @param timeoutForResult Timeout to wait for each capture result.
987          * @throws TimeoutRuntimeException If more than numResultsWait results are
988          *            seen before the result matching myRequest arrives, or each
989          *            individual wait for result times out after
990          *            timeoutForResult ms.
991          */
992         public TotalCaptureResult getTotalCaptureResultForRequest(CaptureRequest myRequest,
993                 int numResultsWait, int timeoutForResult) {
994             ArrayList<CaptureRequest> captureRequests = new ArrayList<>(1);
995             captureRequests.add(myRequest);
996             return getTotalCaptureResultsForRequests(
997                     captureRequests, numResultsWait, timeoutForResult)[0];
998         }
999 
1000         /**
1001          * Get an array of {@link #TotalCaptureResult total capture results} for a given list of
1002          * {@link #CaptureRequest capture requests}. This can be used when the order of results
1003          * may not be the same as the order of requests.
1004          *
1005          * @param captureRequests The list of {@link #CaptureRequest capture requests} whose
1006          *            corresponding {@link #TotalCaptureResult capture results} are
1007          *            being waited for.
1008          * @param numResultsWait Number of frames to wait for the capture results
1009          *            before timeout.
1010          * @throws TimeoutRuntimeException If more than numResultsWait results are
1011          *            seen before all the results matching captureRequests arrive.
1012          */
1013         public TotalCaptureResult[] getTotalCaptureResultsForRequests(
1014                 List<CaptureRequest> captureRequests, int numResultsWait) {
1015             return getTotalCaptureResultsForRequests(captureRequests, numResultsWait,
1016                     CAPTURE_RESULT_TIMEOUT_MS);
1017         }
1018 
1019         /**
1020          * Get an array of {@link #TotalCaptureResult total capture results} for a given list of
1021          * {@link #CaptureRequest capture requests}. This can be used when the order of results
1022          * may not be the same as the order of requests.
1023          *
1024          * @param captureRequests The list of {@link #CaptureRequest capture requests} whose
1025          *            corresponding {@link #TotalCaptureResult capture results} are
1026          *            being waited for.
1027          * @param numResultsWait Number of frames to wait for the capture results
1028          *            before timeout.
1029          * @param timeoutForResult Timeout to wait for each capture result.
1030          * @throws TimeoutRuntimeException If more than numResultsWait results are
1031          *            seen before all the results matching captureRequests arrive.
1032          */
1033         public TotalCaptureResult[] getTotalCaptureResultsForRequests(
1034                 List<CaptureRequest> captureRequests, int numResultsWait, int timeoutForResult) {
1035             if (numResultsWait < 0) {
1036                 throw new IllegalArgumentException("numResultsWait must be no less than 0");
1037             }
1038             if (captureRequests == null || captureRequests.size() == 0) {
1039                 throw new IllegalArgumentException("captureRequests must have at least 1 request.");
1040             }
1041 
1042             // Create a map from each request to the list of result indices to wait for.
1043             HashMap<CaptureRequest, ArrayList<Integer>> remainingResultIndicesMap = new HashMap<>();
1044             for (int i = 0; i < captureRequests.size(); i++) {
1045                 CaptureRequest request = captureRequests.get(i);
1046                 ArrayList<Integer> indices = remainingResultIndicesMap.get(request);
1047                 if (indices == null) {
1048                     indices = new ArrayList<>();
1049                     remainingResultIndicesMap.put(request, indices);
1050                 }
1051                 indices.add(i);
1052             }
1053 
1054             TotalCaptureResult[] results = new TotalCaptureResult[captureRequests.size()];
1055             int i = 0;
1056             do {
1057                 TotalCaptureResult result = getTotalCaptureResult(timeoutForResult);
1058                 CaptureRequest request = result.getRequest();
1059                 ArrayList<Integer> indices = remainingResultIndicesMap.get(request);
1060                 if (indices != null) {
1061                     results[indices.get(0)] = result;
1062                     indices.remove(0);
1063 
1064                     // Remove the entry if all results for this request have been fulfilled.
1065                     if (indices.isEmpty()) {
1066                         remainingResultIndicesMap.remove(request);
1067                     }
1068                 }
1069 
1070                 if (remainingResultIndicesMap.isEmpty()) {
1071                     return results;
1072                 }
1073             } while (i++ < numResultsWait);
1074 
1075             throw new TimeoutRuntimeException("Unable to get the expected capture result after "
1076                     + "waiting for " + numResultsWait + " results");
1077         }
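
        // Example usage (sketch): match results back to possibly out-of-order requests. The
        // `session`, `requestA`, `requestB`, and `mHandler` variables are assumed to exist in
        // the calling test.
        //
        //     SimpleCaptureCallback captureListener = new SimpleCaptureCallback();
        //     session.capture(requestA, captureListener, mHandler);
        //     session.capture(requestB, captureListener, mHandler);
        //     TotalCaptureResult[] results = captureListener.getTotalCaptureResultsForRequests(
        //             Arrays.asList(requestA, requestB), /*numResultsWait*/ 10);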
1078 
1079         /**
1080          * Get an array list of {@link #CaptureFailure capture failures} with maxNumFailures entries
1081          * at most. If it times out before maxNumFailures failures are received, return the failures
1082          * received so far.
1083          *
1084          * @param maxNumFailures The maximal number of failures to return. If it times out before
1085          *                       the maximal number of failures are received, return the received
1086          *                       failures so far.
1087          * @throws UnsupportedOperationException If an error happens while waiting on the failure.
1088          */
1089         public ArrayList<CaptureFailure> getCaptureFailures(long maxNumFailures) {
1090             ArrayList<CaptureFailure> failures = new ArrayList<>();
1091             try {
1092                 for (int i = 0; i < maxNumFailures; i++) {
1093                     CaptureFailure failure = mFailureQueue.poll(CAPTURE_RESULT_TIMEOUT_MS,
1094                             TimeUnit.MILLISECONDS);
1095                     if (failure == null) {
1096                         // If waiting on a failure times out, return the failures so far.
1097                         break;
1098                     }
1099                     failures.add(failure);
1100                 }
1101             }  catch (InterruptedException e) {
1102                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1103             }
1104 
1105             return failures;
1106         }
1107 
1108         /**
1109          * Get an array list of lost buffers with maxNumLost entries at most.
1110          * If it times out before maxNumLost buffer lost callbacks are received, return the
1111          * lost callbacks received so far.
1112          *
1113          * @param maxNumLost The maximal number of buffer lost failures to return. If it times out
1114          *                   before the maximal number of failures are received, return the received
1115          *                   buffer lost failures so far.
1116          * @throws UnsupportedOperationException If an error happens while waiting on the failure.
1117          */
1118         public ArrayList<Pair<Surface, Long>> getLostBuffers(long maxNumLost) {
1119             ArrayList<Pair<Surface, Long>> failures = new ArrayList<>();
1120             try {
1121                 for (int i = 0; i < maxNumLost; i++) {
1122                     Pair<Surface, Long> failure = mBufferLostQueue.poll(CAPTURE_RESULT_TIMEOUT_MS,
1123                             TimeUnit.MILLISECONDS);
1124                     if (failure == null) {
1125                         // If waiting on a failure times out, return the failures so far.
1126                         break;
1127                     }
1128                     failures.add(failure);
1129                 }
1130             }  catch (InterruptedException e) {
1131                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1132             }
1133 
1134             return failures;
1135         }
1136 
1137         /**
1138          * Get an array list of aborted capture sequence ids with maxNumAborts entries
1139          * at most. If it times out before maxNumAborts are received, return the aborted sequences
1140          * received so far.
1141          *
1142          * @param maxNumAborts The maximal number of aborted sequences to return. If it times out
1143          *                     before the maximal number of aborts are received, return the aborted
1144          *                     sequences received so far.
1145          * @throws UnsupportedOperationException If an error happens while waiting on the failed
1146          *                                       sequences.
1147          */
1148         public ArrayList<Integer> geAbortedSequences(long maxNumAborts) {
1149             ArrayList<Integer> abortList = new ArrayList<>();
1150             try {
1151                 for (int i = 0; i < maxNumAborts; i++) {
1152                     Integer abortSequence = mAbortQueue.poll(CAPTURE_RESULT_TIMEOUT_MS,
1153                             TimeUnit.MILLISECONDS);
1154                     if (abortSequence == null) {
1155                         break;
1156                     }
1157                     abortList.add(abortSequence);
1158                 }
1159             }  catch (InterruptedException e) {
1160                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1161             }
1162 
1163             return abortList;
1164         }
1165 
1166         /**
1167          * Wait until the capture start for the given request and expected timestamp arrives,
1168          * or time out after a number of capture starts.
1169          *
1170          * @param request The request for the capture start to wait for.
1171          * @param timestamp The timestamp for the capture start to wait for.
1172          * @param numCaptureStartsWait The number of capture start events to wait for before timing
1173          *                             out.
1174          */
1175         public void waitForCaptureStart(CaptureRequest request, Long timestamp,
1176                 int numCaptureStartsWait) throws Exception {
1177             Pair<CaptureRequest, Long> expectedShutter = new Pair<>(request, timestamp);
1178 
1179             int i = 0;
1180             do {
1181                 Pair<CaptureRequest, Long> shutter = mCaptureStartQueue.poll(
1182                         CAPTURE_RESULT_TIMEOUT_MS, TimeUnit.MILLISECONDS);
1183 
1184                 if (shutter == null) {
1185                     throw new TimeoutRuntimeException("Unable to get any more capture start " +
1186                             "event after waiting for " + CAPTURE_RESULT_TIMEOUT_MS + " ms.");
1187                 } else if (expectedShutter.equals(shutter)) {
1188                     return;
1189                 }
1190 
1191             } while (i++ < numCaptureStartsWait);
1192 
1193             throw new TimeoutRuntimeException("Unable to get the expected capture start " +
1194                     "event after waiting for " + numCaptureStartsWait + " capture starts");
1195         }
1196 
1197         /**
1198          * Wait until the capture sequence completed callback for a given sequence ID is received.
1199          *
1200          * @param sequenceId The sequence ID of the capture sequence completed callback to wait for.
1201          * @param timeoutMs Time to wait for each capture sequence complete callback before
1202          *                  timing out.
1203          */
1204         public long getCaptureSequenceLastFrameNumber(int sequenceId, long timeoutMs) {
1205             try {
1206                 while (true) {
1207                     Pair<Integer, Long> completedSequence =
1208                             mCaptureSequenceCompletedQueue.poll(timeoutMs, TimeUnit.MILLISECONDS);
1209                     assertNotNull("Wait for a capture sequence completed timed out in " +
1210                             timeoutMs + "ms", completedSequence);
1211 
1212                     if (completedSequence.first.equals(sequenceId)) {
1213                         return completedSequence.second.longValue();
1214                     }
1215                 }
1216             } catch (InterruptedException e) {
1217                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1218             }
1219         }
1220 
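        // Hedged usage sketch (illustrative): pairing a repeating request's sequence id with
        // the last frame number it produced. Session, request, callback and handler names are
        // assumptions.
        //
        //     int sequenceId = session.setRepeatingRequest(previewRequest, resultListener, handler);
        //     session.stopRepeating();
        //     long lastFrameNumber = resultListener.getCaptureSequenceLastFrameNumber(
        //             sequenceId, CAPTURE_RESULT_TIMEOUT_MS);
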
1221         public boolean hasMoreResults()
1222         {
1223             return !mQueue.isEmpty();
1224         }
1225 
1226         public boolean hasMoreFailures()
1227         {
1228             return !mFailureQueue.isEmpty();
1229         }
1230 
1231         public int getNumLostBuffers()
1232         {
1233             return mBufferLostQueue.size();
1234         }
1235 
1236         public boolean hasMoreAbortedSequences()
1237         {
1238             return !mAbortQueue.isEmpty();
1239         }
1240 
1241         public List<Long> getCaptureStartTimestamps(int count) {
1242             Iterator<Pair<CaptureRequest, Long>> iter = mCaptureStartQueue.iterator();
1243             List<Long> timestamps = new ArrayList<Long>();
1244             try {
1245                 while (timestamps.size() < count) {
1246                     Pair<CaptureRequest, Long> captureStart = mCaptureStartQueue.poll(
1247                             CAPTURE_RESULT_TIMEOUT_MS, TimeUnit.MILLISECONDS);
1248                     assertNotNull("Wait for a capture start timed out in "
1249                             + CAPTURE_RESULT_TIMEOUT_MS + "ms", captureStart);
1250 
1251                     timestamps.add(captureStart.second);
1252                 }
1253                 return timestamps;
1254             } catch (InterruptedException e) {
1255                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1256             }
1257         }
1258 
1259         public void drain() {
1260             mQueue.clear();
1261             mNumFramesArrived.getAndSet(0);
1262             mFailureQueue.clear();
1263             mBufferLostQueue.clear();
1264             mCaptureStartQueue.clear();
1265             mAbortQueue.clear();
1266         }
1267     }
1268 
1269     private static class BlockingCameraManager
1270             extends com.android.ex.camera2.blocking.BlockingCameraManager {
1271 
1272         BlockingCameraManager(CameraManager manager) {
1273             super(manager);
1274         }
1275 
1276         public CameraDevice openCamera(String cameraId, boolean overrideToPortrait,
1277                 CameraDevice.StateCallback listener, Handler handler)
1278                 throws CameraAccessException, BlockingOpenException {
1279             if (handler == null) {
1280                 throw new IllegalArgumentException("handler must not be null");
1281             } else if (handler.getLooper() == Looper.myLooper()) {
1282                 throw new IllegalArgumentException(
1283                         "handler's looper must not be the current looper");
1284             }
1285 
1286             return (new OpenListener(mManager, cameraId, overrideToPortrait, listener, handler))
1287                     .blockUntilOpen();
1288         }
1289 
1290         protected class OpenListener
1291                 extends com.android.ex.camera2.blocking.BlockingCameraManager.OpenListener {
1292             OpenListener(CameraManager manager, String cameraId, boolean overrideToPortrait,
1293                     CameraDevice.StateCallback listener, Handler handler)
1294                     throws CameraAccessException {
1295                 super(cameraId, listener);
1296                 manager.openCamera(cameraId, overrideToPortrait, handler, this);
1297             }
1298         }
1299     }
1300 
1301     public static boolean hasCapability(CameraCharacteristics characteristics, int capability) {
1302         int [] capabilities =
1303                 characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
1304         for (int c : capabilities) {
1305             if (c == capability) {
1306                 return true;
1307             }
1308         }
1309         return false;
1310     }
1311 
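    // Hedged usage sketch (illustrative): gating a test on a specific capability, e.g. RAW.
    // The manager and cameraId variables are assumptions.
    //
    //     CameraCharacteristics chars = manager.getCameraCharacteristics(cameraId);
    //     if (!hasCapability(chars, CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
    //         return; // skip: this device does not advertise the RAW capability
    //     }
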
1312     public static boolean isSystemCamera(CameraManager manager, String cameraId)
1313             throws CameraAccessException {
1314         CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
1315         return hasCapability(characteristics,
1316                 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA);
1317     }
1318 
1319     public static String[] getCameraIdListForTesting(CameraManager manager,
1320             boolean getSystemCameras)
1321             throws CameraAccessException {
1322         String [] ids = manager.getCameraIdListNoLazy();
1323         List<String> idsForTesting = new ArrayList<String>();
1324         for (String id : ids) {
1325             boolean isSystemCamera = isSystemCamera(manager, id);
1326             if (getSystemCameras == isSystemCamera) {
1327                 idsForTesting.add(id);
1328             }
1329         }
1330         return idsForTesting.toArray(new String[idsForTesting.size()]);
1331     }
1332 
1333     public static Set<Set<String>> getConcurrentCameraIds(CameraManager manager,
1334             boolean getSystemCameras)
1335             throws CameraAccessException {
1336         Set<String> cameraIds = new HashSet<String>(Arrays.asList(getCameraIdListForTesting(manager, getSystemCameras)));
1337         Set<Set<String>> combinations =  manager.getConcurrentCameraIds();
1338         Set<Set<String>> correctComb = new HashSet<Set<String>>();
1339         for (Set<String> comb : combinations) {
1340             Set<String> filteredIds = new HashSet<String>();
1341             for (String id : comb) {
1342                 if (cameraIds.contains(id)) {
1343                     filteredIds.add(id);
1344                 }
1345             }
1346             if (filteredIds.isEmpty()) {
1347                 continue;
1348             }
1349             correctComb.add(filteredIds);
1350         }
1351         return correctComb;
1352     }
1353 
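    // Hedged usage sketch (illustrative): iterating the filtered concurrent-camera id sets
    // returned by getConcurrentCameraIds(); the manager variable is an assumption.
    //
    //     Set<Set<String>> combos = getConcurrentCameraIds(manager, /*getSystemCameras*/ false);
    //     for (Set<String> combo : combos) {
    //         // Each combo holds only ids from the tested (non-system) camera list that can
    //         // stream concurrently.
    //     }
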
1354     /**
1355      * Block until the camera is opened.
1356      *
1357      * <p>Don't use this to test #onDisconnected/#onError since this will throw
1358      * an AssertionError if it fails to open the camera device.</p>
1359      *
1360      * @return CameraDevice opened camera device
1361      *
1362      * @throws IllegalArgumentException
1363      *            If the handler is null, or if the handler's looper is current.
1364      * @throws CameraAccessException
1365      *            If open fails immediately.
1366      * @throws BlockingOpenException
1367      *            If open fails after blocking for some amount of time.
1368      * @throws TimeoutRuntimeException
1369      *            If opening times out. Typically unrecoverable.
1370      */
1371     public static CameraDevice openCamera(CameraManager manager, String cameraId,
1372             CameraDevice.StateCallback listener, Handler handler) throws CameraAccessException,
1373             BlockingOpenException {
1374 
1375         /**
1376          * Although camera2 API allows 'null' Handler (it will just use the current
1377          * thread's Looper), this is not what we want for CTS.
1378          *
1379          * In CTS the default looper is used only to process events in between test runs,
1380          * so anything sent there would not be executed inside a test and the test would fail.
1381          *
1382          * In this case, BlockingCameraManager#openCamera performs the check for us.
1383          */
1384         return (new CameraTestUtils.BlockingCameraManager(manager))
1385                 .openCamera(cameraId, listener, handler);
1386     }
1387 
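    // Hedged usage sketch (illustrative): opening a camera on a dedicated handler thread, as
    // required by the looper check described above. Thread and variable names are assumptions.
    //
    //     HandlerThread cameraThread = new HandlerThread("CameraTestThread");
    //     cameraThread.start();
    //     Handler cameraHandler = new Handler(cameraThread.getLooper());
    //     CameraDevice camera = openCamera(manager, cameraId, cameraHandler);
    //     // ... exercise the device ...
    //     camera.close();
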
1388     /**
1389      * Block until the camera is opened.
1390      *
1391      * <p>Don't use this to test #onDisconnected/#onError since this will throw
1392      * an AssertionError if it fails to open the camera device.</p>
1393      *
1394      * @throws IllegalArgumentException
1395      *            If the handler is null, or if the handler's looper is current.
1396      * @throws CameraAccessException
1397      *            If open fails immediately.
1398      * @throws BlockingOpenException
1399      *            If open fails after blocking for some amount of time.
1400      * @throws TimeoutRuntimeException
1401      *            If opening times out. Typically unrecoverable.
1402      */
1403     public static CameraDevice openCamera(CameraManager manager, String cameraId,
1404             boolean overrideToPortrait, CameraDevice.StateCallback listener, Handler handler)
1405             throws CameraAccessException, BlockingOpenException {
1406         return (new CameraTestUtils.BlockingCameraManager(manager))
1407                 .openCamera(cameraId, overrideToPortrait, listener, handler);
1408     }
1409 
1410 
1411     /**
1412      * Block until the camera is opened.
1413      *
1414      * <p>Don't use this to test #onDisconnected/#onError since this will throw
1415      * an AssertionError if it fails to open the camera device.</p>
1416      *
1417      * @throws IllegalArgumentException
1418      *            If the handler is null, or if the handler's looper is current.
1419      * @throws CameraAccessException
1420      *            If open fails immediately.
1421      * @throws BlockingOpenException
1422      *            If open fails after blocking for some amount of time.
1423      * @throws TimeoutRuntimeException
1424      *            If opening times out. Typically unrecoverable.
1425      */
1426     public static CameraDevice openCamera(CameraManager manager, String cameraId, Handler handler)
1427             throws CameraAccessException,
1428             BlockingOpenException {
1429         return openCamera(manager, cameraId, /*listener*/null, handler);
1430     }
1431 
1432     /**
1433      * Configure a new camera session with output surfaces and type.
1434      *
1435      * @param camera The CameraDevice to be configured.
1436      * @param outputSurfaces The surface list that is used for camera output.
1437      * @param listener The callback CameraDevice will notify when capture results are available.
1438      */
1439     public static CameraCaptureSession configureCameraSession(CameraDevice camera,
1440             List<Surface> outputSurfaces, boolean isHighSpeed,
1441             CameraCaptureSession.StateCallback listener, Handler handler)
1442             throws CameraAccessException {
1443         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1444         if (isHighSpeed) {
1445             camera.createConstrainedHighSpeedCaptureSession(outputSurfaces,
1446                     sessionListener, handler);
1447         } else {
1448             camera.createCaptureSession(outputSurfaces, sessionListener, handler);
1449         }
1450         CameraCaptureSession session =
1451                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1452         assertFalse("Camera session should not be a reprocessable session",
1453                 session.isReprocessable());
1454         String sessionType = isHighSpeed ? "High Speed" : "Normal";
1455         assertTrue("Capture session type must be " + sessionType,
1456                 isHighSpeed ==
1457                 CameraConstrainedHighSpeedCaptureSession.class.isAssignableFrom(session.getClass()));
1458 
1459         return session;
1460     }
1461 
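    // Hedged usage sketch (illustrative): configuring a regular (non-high-speed) session with
    // a preview surface and a JPEG reader surface; all variable names are assumptions.
    //
    //     List<Surface> outputs = Arrays.asList(previewSurface, jpegReader.getSurface());
    //     CameraCaptureSession session = configureCameraSession(camera, outputs,
    //             /*isHighSpeed*/ false, sessionStateCallback, handler);
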
1462     /**
1463      * Build a new constrained camera session with output surfaces, type and recording session
1464      * parameters.
1465      *
1466      * @param camera The CameraDevice to be configured.
1467      * @param outputSurfaces The surface list that is used for camera output.
1468      * @param listener The callback CameraDevice will notify when capture results are available.
1469      * @param initialRequest Initial request settings to use as session parameters.
1470      */
1471     public static CameraCaptureSession buildConstrainedCameraSession(CameraDevice camera,
1472             List<Surface> outputSurfaces, CameraCaptureSession.StateCallback listener,
1473             Handler handler, CaptureRequest initialRequest) throws CameraAccessException {
1474         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1475 
1476         List<OutputConfiguration> outConfigurations = new ArrayList<>(outputSurfaces.size());
1477         for (Surface surface : outputSurfaces) {
1478             outConfigurations.add(new OutputConfiguration(surface));
1479         }
1480         SessionConfiguration sessionConfig = new SessionConfiguration(
1481                 SessionConfiguration.SESSION_HIGH_SPEED, outConfigurations,
1482                 new HandlerExecutor(handler), sessionListener);
1483         sessionConfig.setSessionParameters(initialRequest);
1484         camera.createCaptureSession(sessionConfig);
1485 
1486         CameraCaptureSession session =
1487                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1488         assertFalse("Camera session should not be a reprocessable session",
1489                 session.isReprocessable());
1490         assertTrue("Capture session type must be High Speed",
1491                 CameraConstrainedHighSpeedCaptureSession.class.isAssignableFrom(
1492                         session.getClass()));
1493 
1494         return session;
1495     }
1496 
1497     /**
1498      * Configure a new camera session with output configurations.
1499      *
1500      * @param camera The CameraDevice to be configured.
1501      * @param outputs The OutputConfiguration list that is used for camera output.
1502      * @param listener The callback CameraDevice will notify when capture results are available.
1503      */
1504     public static CameraCaptureSession configureCameraSessionWithConfig(CameraDevice camera,
1505             List<OutputConfiguration> outputs,
1506             CameraCaptureSession.StateCallback listener, Handler handler)
1507             throws CameraAccessException {
1508         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1509         camera.createCaptureSessionByOutputConfigurations(outputs, sessionListener, handler);
1510         CameraCaptureSession session =
1511                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1512         assertFalse("Camera session should not be a reprocessable session",
1513                 session.isReprocessable());
1514         return session;
1515     }
1516 
1517     /**
1518      * Configure a new camera session with output configurations / a session color space.
1519      *
1520      * @param camera The CameraDevice to be configured.
1521      * @param outputs The OutputConfiguration list that is used for camera output.
1522      * @param listener The callback CameraDevice will notify when capture results are available.
1523      * @param colorSpace The ColorSpace for this session.
1524      */
1525     public static CameraCaptureSession configureCameraSessionWithColorSpace(CameraDevice camera,
1526             List<OutputConfiguration> outputs,
1527             CameraCaptureSession.StateCallback listener, Handler handler,
1528             ColorSpace.Named colorSpace) throws CameraAccessException {
1529         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1530         SessionConfiguration sessionConfiguration = new SessionConfiguration(
1531                 SessionConfiguration.SESSION_REGULAR, outputs,
1532                 new HandlerExecutor(handler), sessionListener);
1533         sessionConfiguration.setColorSpace(colorSpace);
1534         camera.createCaptureSession(sessionConfiguration);
1535         CameraCaptureSession session =
1536                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1537         assertFalse("Camera session should not be a reprocessable session",
1538                 session.isReprocessable());
1539         return session;
1540     }
1541 
1542     /**
1543      * Try to configure a new camera session with output configurations.
1544      *
1545      * @param camera The CameraDevice to be configured.
1546      * @param outputs The OutputConfiguration list that is used for camera output.
1547      * @param initialRequest The session parameters passed in during stream configuration
1548      * @param listener The callback CameraDevice will notify when capture results are available.
1549      */
1550     public static CameraCaptureSession tryConfigureCameraSessionWithConfig(CameraDevice camera,
1551             List<OutputConfiguration> outputs, CaptureRequest initialRequest,
1552             CameraCaptureSession.StateCallback listener, Handler handler)
1553             throws CameraAccessException {
1554         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1555         SessionConfiguration sessionConfig = new SessionConfiguration(
1556                 SessionConfiguration.SESSION_REGULAR, outputs, new HandlerExecutor(handler),
1557                 sessionListener);
1558         sessionConfig.setSessionParameters(initialRequest);
1559         camera.createCaptureSession(sessionConfig);
1560 
1561         Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
1562                                    BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
1563         int state = sessionListener.getStateWaiter().waitForAnyOfStates(
1564                 Arrays.asList(sessionStates), SESSION_CONFIGURE_TIMEOUT_MS);
1565 
1566         CameraCaptureSession session = null;
1567         if (state == BlockingSessionCallback.SESSION_READY) {
1568             session = sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1569             assertFalse("Camera session should not be a reprocessable session",
1570                     session.isReprocessable());
1571         }
1572         return session;
1573     }
1574 
1575     /**
1576      * Configure a new camera session with output surfaces and initial session parameters.
1577      *
1578      * @param camera The CameraDevice to be configured.
1579      * @param outputSurfaces The surface list that is used for camera output.
1580      * @param listener The callback CameraDevice will notify when session is available.
1581      * @param handler The handler used to notify callbacks.
1582      * @param initialRequest Initial request settings to use as session parameters.
1583      */
1584     public static CameraCaptureSession configureCameraSessionWithParameters(CameraDevice camera,
1585             List<Surface> outputSurfaces, BlockingSessionCallback listener,
1586             Handler handler, CaptureRequest initialRequest) throws CameraAccessException {
1587         List<OutputConfiguration> outConfigurations = new ArrayList<>(outputSurfaces.size());
1588         for (Surface surface : outputSurfaces) {
1589             outConfigurations.add(new OutputConfiguration(surface));
1590         }
1591         SessionConfiguration sessionConfig = new SessionConfiguration(
1592                 SessionConfiguration.SESSION_REGULAR, outConfigurations,
1593                 new HandlerExecutor(handler), listener);
1594         sessionConfig.setSessionParameters(initialRequest);
1595         camera.createCaptureSession(sessionConfig);
1596 
1597         CameraCaptureSession session = listener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1598         assertFalse("Camera session should not be a reprocessable session",
1599                 session.isReprocessable());
1600         assertFalse("Capture session type must be regular",
1601                 CameraConstrainedHighSpeedCaptureSession.class.isAssignableFrom(
1602                         session.getClass()));
1603 
1604         return session;
1605     }
1606 
1607     /**
1608      * Configure a new camera session with output surfaces.
1609      *
1610      * @param camera The CameraDevice to be configured.
1611      * @param outputSurfaces The surface list that is used for camera output.
1612      * @param listener The callback CameraDevice will notify when capture results are available.
1613      */
1614     public static CameraCaptureSession configureCameraSession(CameraDevice camera,
1615             List<Surface> outputSurfaces,
1616             CameraCaptureSession.StateCallback listener, Handler handler)
1617             throws CameraAccessException {
1618 
1619         return configureCameraSession(camera, outputSurfaces, /*isHighSpeed*/false,
1620                 listener, handler);
1621     }
1622 
1623     public static CameraCaptureSession configureReprocessableCameraSession(CameraDevice camera,
1624             InputConfiguration inputConfiguration, List<Surface> outputSurfaces,
1625             CameraCaptureSession.StateCallback listener, Handler handler)
1626             throws CameraAccessException {
1627         List<OutputConfiguration> outputConfigs = new ArrayList<OutputConfiguration>();
1628         for (Surface surface : outputSurfaces) {
1629             outputConfigs.add(new OutputConfiguration(surface));
1630         }
1631         CameraCaptureSession session = configureReprocessableCameraSessionWithConfigurations(
1632                 camera, inputConfiguration, outputConfigs, listener, handler);
1633 
1634         return session;
1635     }
1636 
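    // Hedged usage sketch (illustrative): creating a YUV reprocessable session; the input
    // size, output surface list, and callback names are assumptions.
    //
    //     InputConfiguration inputConfig = new InputConfiguration(
    //             maxYuvSize.getWidth(), maxYuvSize.getHeight(), ImageFormat.YUV_420_888);
    //     CameraCaptureSession session = configureReprocessableCameraSession(
    //             camera, inputConfig, outputSurfaces, sessionStateCallback, handler);
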
1637     public static CameraCaptureSession configureReprocessableCameraSessionWithConfigurations(
1638             CameraDevice camera, InputConfiguration inputConfiguration,
1639             List<OutputConfiguration> outputConfigs, CameraCaptureSession.StateCallback listener,
1640             Handler handler) throws CameraAccessException {
1641         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1642         SessionConfiguration sessionConfig = new SessionConfiguration(
1643                 SessionConfiguration.SESSION_REGULAR, outputConfigs, new HandlerExecutor(handler),
1644                 sessionListener);
1645         sessionConfig.setInputConfiguration(inputConfiguration);
1646         camera.createCaptureSession(sessionConfig);
1647 
1648         Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
1649                                    BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
1650         int state = sessionListener.getStateWaiter().waitForAnyOfStates(
1651                 Arrays.asList(sessionStates), SESSION_CONFIGURE_TIMEOUT_MS);
1652 
1653         assertTrue("Creating a reprocessable session failed.",
1654                 state == BlockingSessionCallback.SESSION_READY);
1655         CameraCaptureSession session =
1656                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1657         assertTrue("Camera session should be a reprocessable session", session.isReprocessable());
1658 
1659         return session;
1660     }
1661 
1662     /**
1663      * Create a reprocessable camera session with input and output configurations.
1664      *
1665      * @param camera The CameraDevice to be configured.
1666      * @param inputConfiguration The input configuration used to create this session.
1667      * @param outputs The output configurations used to create this session.
1668      * @param listener The callback CameraDevice will notify when capture results are available.
1669      * @param handler The handler used to notify callbacks.
1670      * @return The session ready to use.
1671      * @throws CameraAccessException
1672      */
1673     public static CameraCaptureSession configureReprocCameraSessionWithConfig(CameraDevice camera,
1674             InputConfiguration inputConfiguration, List<OutputConfiguration> outputs,
1675             CameraCaptureSession.StateCallback listener, Handler handler)
1676             throws CameraAccessException {
1677         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1678         camera.createReprocessableCaptureSessionByConfigurations(inputConfiguration, outputs,
1679                 sessionListener, handler);
1680 
1681         Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
1682                                    BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
1683         int state = sessionListener.getStateWaiter().waitForAnyOfStates(
1684                 Arrays.asList(sessionStates), SESSION_CONFIGURE_TIMEOUT_MS);
1685 
1686         assertTrue("Creating a reprocessable session failed.",
1687                 state == BlockingSessionCallback.SESSION_READY);
1688 
1689         CameraCaptureSession session =
1690                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1691         assertTrue("Camera session should be a reprocessable session", session.isReprocessable());
1692 
1693         return session;
1694     }
1695 
1696     public static <T> void assertArrayNotEmpty(T arr, String message) {
1697         assertTrue(message, arr != null && Array.getLength(arr) > 0);
1698     }
1699 
1700     /**
1701      * Check if the format is a legal YUV format camera supported.
1702      */
1703     public static void checkYuvFormat(int format) {
1704         if ((format != ImageFormat.YUV_420_888) &&
1705                 (format != ImageFormat.NV21) &&
1706                 (format != ImageFormat.YV12)) {
1707             fail("Wrong formats: " + format);
1708         }
1709     }
1710 
1711     /**
1712      * Check if image size and format match given size and format.
1713      */
1714     public static void checkImage(Image image, int width, int height, int format) {
1715         checkImage(image, width, height, format, /*colorSpace*/null);
1716     }
1717 
1718     /**
1719      * Check if image size and format match given size and format.
1720      */
1721     public static void checkImage(Image image, int width, int height, int format,
1722             ColorSpace colorSpace) {
1723         // Image reader will wrap YV12/NV21 image by YUV_420_888
1724         if (format == ImageFormat.NV21 || format == ImageFormat.YV12) {
1725             format = ImageFormat.YUV_420_888;
1726         }
1727         assertNotNull("Input image is invalid", image);
1728         assertEquals("Format doesn't match", format, image.getFormat());
1729         assertEquals("Width doesn't match", width, image.getWidth());
1730         assertEquals("Height doesn't match", height, image.getHeight());
1731 
1732         if (colorSpace != null && format != ImageFormat.JPEG && format != ImageFormat.JPEG_R
1733                 && format != ImageFormat.HEIC) {
1734             int dataSpace = image.getDataSpace();
1735             ColorSpace actualColorSpace = ColorSpace.getFromDataSpace(dataSpace);
1736             assertNotNull("getFromDataSpace() returned null for format "
1737                     + format + ", dataSpace " + dataSpace, actualColorSpace);
1738             assertEquals("colorSpace " + actualColorSpace.getId()
1739                     + " does not match expected color space "
1740                     + colorSpace.getId(), colorSpace.getId(), actualColorSpace.getId());
1741         }
1742     }
1743 
1744     /**
1745      * <p>Read data from all planes of an Image into a contiguous unpadded, unpacked
1746      * 1-D linear byte array, such that it can be written to disk, or accessed by
1747      * software conveniently. It supports YUV_420_888/NV21/YV12 and JPEG input
1748      * image formats.</p>
1749      *
1750      * <p>For YUV_420_888/NV21/YV12/Y8/Y16, it returns a byte array that contains
1751      * the Y plane data first, followed by the U(Cb) and V(Cr) planes if there are any
1752      * (xstride = width, ystride = height for chroma and luma components).</p>
1753      *
1754      * <p>For JPEG, it returns a 1-D byte array contains a complete JPEG image.</p>
1755      *
1756      * <p>For YUV P010, it returns a byte array that contains Y plane first, followed
1757      * by the interleaved U(Cb)/V(Cr) plane.</p>
1758      */
1759     public static byte[] getDataFromImage(Image image) {
1760         assertNotNull("Invalid image:", image);
1761         int format = image.getFormat();
1762         int width = image.getWidth();
1763         int height = image.getHeight();
1764         int rowStride, pixelStride;
1765         byte[] data = null;
1766 
1767         // Read image data
1768         Plane[] planes = image.getPlanes();
1769         assertTrue("Fail to get image planes", planes != null && planes.length > 0);
1770 
1771         // Check image validity
1772         checkAndroidImageFormat(image);
1773 
1774         ByteBuffer buffer = null;
1775         // JPEG doesn't have pixelstride and rowstride, treat it as 1D buffer.
1776         // Same goes for DEPTH_POINT_CLOUD, RAW_PRIVATE, DEPTH_JPEG, HEIC, and JPEG_R
1777         if (format == ImageFormat.JPEG || format == ImageFormat.DEPTH_POINT_CLOUD ||
1778                 format == ImageFormat.RAW_PRIVATE || format == ImageFormat.DEPTH_JPEG ||
1779                 format == ImageFormat.HEIC || format == ImageFormat.JPEG_R) {
1780             buffer = planes[0].getBuffer();
1781             assertNotNull("Fail to get jpeg/depth/heic ByteBuffer", buffer);
1782             data = new byte[buffer.remaining()];
1783             buffer.get(data);
1784             buffer.rewind();
1785             return data;
1786         } else if (format == ImageFormat.YCBCR_P010) {
1787             // P010 samples are stored within 16 bit values
1788             int offset = 0;
1789             int bytesPerPixelRounded = (ImageFormat.getBitsPerPixel(format) + 7) / 8;
1790             data = new byte[width * height * bytesPerPixelRounded];
1791             assertTrue("Unexpected number of planes, expected " + 3 + " actual " + planes.length,
1792                     planes.length == 3);
1793             for (int i = 0; i < 2; i++) {
1794                 buffer = planes[i].getBuffer();
1795                 assertNotNull("Fail to get bytebuffer from plane", buffer);
1796                 buffer.rewind();
1797                 rowStride = planes[i].getRowStride();
1798                 if (VERBOSE) {
1799                     Log.v(TAG, "rowStride " + rowStride);
1800                     Log.v(TAG, "width " + width);
1801                     Log.v(TAG, "height " + height);
1802                 }
1803                 int h = (i == 0) ? height : height / 2;
1804                 for (int row = 0; row < h; row++) {
1805                     // Each 10-bit pixel occupies 2 bytes
1806                     int length = 2 * width;
1807                     buffer.get(data, offset, length);
1808                     offset += length;
1809                     if (row < h - 1) {
1810                         buffer.position(buffer.position() + rowStride - length);
1811                     }
1812                 }
1813                 if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i);
1814                 buffer.rewind();
1815             }
1816             return data;
1817         }
1818 
1819         int offset = 0;
1820         data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
1821         int maxRowSize = planes[0].getRowStride();
1822         for (int i = 0; i < planes.length; i++) {
1823             if (maxRowSize < planes[i].getRowStride()) {
1824                 maxRowSize = planes[i].getRowStride();
1825             }
1826         }
1827         byte[] rowData = new byte[maxRowSize];
1828         if(VERBOSE) Log.v(TAG, "get data from " + planes.length + " planes");
1829         for (int i = 0; i < planes.length; i++) {
1830             buffer = planes[i].getBuffer();
1831             assertNotNull("Fail to get bytebuffer from plane", buffer);
1832             buffer.rewind();
1833             rowStride = planes[i].getRowStride();
1834             pixelStride = planes[i].getPixelStride();
1835             assertTrue("pixel stride " + pixelStride + " is invalid", pixelStride > 0);
1836             if (VERBOSE) {
1837                 Log.v(TAG, "pixelStride " + pixelStride);
1838                 Log.v(TAG, "rowStride " + rowStride);
1839                 Log.v(TAG, "width " + width);
1840                 Log.v(TAG, "height " + height);
1841             }
1842             // For multi-planar yuv images, assuming yuv420 with 2x2 chroma subsampling.
1843             int w = (i == 0) ? width : width / 2;
1844             int h = (i == 0) ? height : height / 2;
1845             assertTrue("rowStride " + rowStride + " should be >= width " + w , rowStride >= w);
1846             for (int row = 0; row < h; row++) {
1847                 int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
1848                 int length;
1849                 if (pixelStride == bytesPerPixel) {
1850                     // Special case: optimized read of the entire row
1851                     length = w * bytesPerPixel;
1852                     buffer.get(data, offset, length);
1853                     offset += length;
1854                 } else {
1855                     // Generic case: should work for any pixelStride but slower.
1856                     // Use intermediate buffer to avoid read byte-by-byte from
1857                     // DirectByteBuffer, which is very bad for performance
1858                     length = (w - 1) * pixelStride + bytesPerPixel;
1859                     buffer.get(rowData, 0, length);
1860                     for (int col = 0; col < w; col++) {
1861                         data[offset++] = rowData[col * pixelStride];
1862                     }
1863                 }
1864                 // Advance buffer the remainder of the row stride
1865                 if (row < h - 1) {
1866                     buffer.position(buffer.position() + rowStride - length);
1867                 }
1868             }
1869             if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i);
1870             buffer.rewind();
1871         }
1872         return data;
1873     }
1874 
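    // Hedged usage sketch (illustrative): dumping one captured YUV frame to disk with
    // getDataFromImage() and dumpFile() below; the reader and output path are assumptions.
    //
    //     Image image = yuvReader.acquireNextImage();
    //     try {
    //         dumpFile("/sdcard/DCIM/test_frame.yuv", getDataFromImage(image));
    //     } finally {
    //         image.close();
    //     }
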
1875     /**
1876      * <p>Check Android image format validity for an image; only the formats below are supported:</p>
1877      *
1878      * <p>YUV_420_888/NV21/YV12; more can be added in the future</p>
1879      */
1880     public static void checkAndroidImageFormat(Image image) {
1881         int format = image.getFormat();
1882         Plane[] planes = image.getPlanes();
1883         switch (format) {
1884             case ImageFormat.YUV_420_888:
1885             case ImageFormat.NV21:
1886             case ImageFormat.YV12:
1887             case ImageFormat.YCBCR_P010:
1888                 assertEquals("YUV420 format Images should have 3 planes", 3, planes.length);
1889                 break;
1890             case ImageFormat.JPEG:
1891             case ImageFormat.RAW_SENSOR:
1892             case ImageFormat.RAW_PRIVATE:
1893             case ImageFormat.DEPTH16:
1894             case ImageFormat.DEPTH_POINT_CLOUD:
1895             case ImageFormat.DEPTH_JPEG:
1896             case ImageFormat.Y8:
1897             case ImageFormat.HEIC:
1898             case ImageFormat.JPEG_R:
1899                 assertEquals("JPEG/RAW/depth/Y8 Images should have one plane", 1, planes.length);
1900                 break;
1901             default:
1902                 fail("Unsupported Image Format: " + format);
1903         }
1904     }
1905 
1906     public static void dumpFile(String fileName, Bitmap data) {
1907         FileOutputStream outStream;
1908         try {
1909             Log.v(TAG, "output will be saved as " + fileName);
1910             outStream = new FileOutputStream(fileName);
1911         } catch (IOException ioe) {
1912             throw new RuntimeException("Unable to create debug output file " + fileName, ioe);
1913         }
1914 
1915         try {
1916             data.compress(Bitmap.CompressFormat.JPEG, /*quality*/90, outStream);
1917             outStream.close();
1918         } catch (IOException ioe) {
1919             throw new RuntimeException("failed writing data to file " + fileName, ioe);
1920         }
1921     }
1922 
1923     public static void dumpFile(String fileName, byte[] data) {
1924         FileOutputStream outStream;
1925         try {
1926             Log.v(TAG, "output will be saved as " + fileName);
1927             outStream = new FileOutputStream(fileName);
1928         } catch (IOException ioe) {
1929             throw new RuntimeException("Unable to create debug output file " + fileName, ioe);
1930         }
1931 
1932         try {
1933             outStream.write(data);
1934             outStream.close();
1935         } catch (IOException ioe) {
1936             throw new RuntimeException("failed writing data to file " + fileName, ioe);
1937         }
1938     }
1939 
1940     /**
1941      * Get the available output sizes for the user-defined {@code format}.
1942      *
1943      * <p>Note that implementation-defined/hidden formats are not supported.</p>
1944      */
1945     public static Size[] getSupportedSizeForFormat(int format, String cameraId,
1946             CameraManager cameraManager) throws CameraAccessException {
1947         return getSupportedSizeForFormat(format, cameraId, cameraManager,
1948                 /*maxResolution*/false);
1949     }
1950 
1951     public static Size[] getSupportedSizeForFormat(int format, String cameraId,
1952             CameraManager cameraManager, boolean maxResolution) throws CameraAccessException {
1953         CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId);
1954         assertNotNull("Can't get camera characteristics!", properties);
1955         if (VERBOSE) {
1956             Log.v(TAG, "get camera characteristics for camera: " + cameraId);
1957         }
1958         CameraCharacteristics.Key<StreamConfigurationMap> configMapTag = maxResolution ?
1959                 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION :
1960                 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP;
1961         StreamConfigurationMap configMap = properties.get(configMapTag);
1962         if (configMap == null) {
1963             assertTrue("SCALER_STREAM_CONFIGURATION_MAP is null!", maxResolution);
1964             return null;
1965         }
1966 
1967         Size[] availableSizes = configMap.getOutputSizes(format);
1968         if (!maxResolution) {
1969             assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for format: "
1970                     + format);
1971         }
1972         Size[] highResAvailableSizes = configMap.getHighResolutionOutputSizes(format);
1973         if (highResAvailableSizes != null && highResAvailableSizes.length > 0) {
1974             Size[] allSizes = new Size[availableSizes.length + highResAvailableSizes.length];
1975             System.arraycopy(availableSizes, 0, allSizes, 0,
1976                     availableSizes.length);
1977             System.arraycopy(highResAvailableSizes, 0, allSizes, availableSizes.length,
1978                     highResAvailableSizes.length);
1979             availableSizes = allSizes;
1980         }
1981         if (VERBOSE) Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes));
1982         return availableSizes;
1983     }
1984 
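    // Hedged usage sketch (illustrative): querying all JPEG output sizes (high-resolution
    // sizes included) and picking the largest; variable names are assumptions.
    //
    //     Size[] jpegSizes = getSupportedSizeForFormat(ImageFormat.JPEG, cameraId, manager);
    //     Size maxJpegSize =
    //             getAscendingOrderSizes(Arrays.asList(jpegSizes), /*ascending*/ false).get(0);
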
1985     /**
1986      * Get the available output sizes for the given class.
1987      *
1988      */
1989     public static Size[] getSupportedSizeForClass(Class klass, String cameraId,
1990             CameraManager cameraManager) throws CameraAccessException {
1991         CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId);
1992         assertNotNull("Can't get camera characteristics!", properties);
1993         if (VERBOSE) {
1994             Log.v(TAG, "get camera characteristics for camera: " + cameraId);
1995         }
1996         StreamConfigurationMap configMap =
1997                 properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
1998         Size[] availableSizes = configMap.getOutputSizes(klass);
1999         assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for class: "
2000                 + klass);
2001         Size[] highResAvailableSizes = configMap.getHighResolutionOutputSizes(ImageFormat.PRIVATE);
2002         if (highResAvailableSizes != null && highResAvailableSizes.length > 0) {
2003             Size[] allSizes = new Size[availableSizes.length + highResAvailableSizes.length];
2004             System.arraycopy(availableSizes, 0, allSizes, 0,
2005                     availableSizes.length);
2006             System.arraycopy(highResAvailableSizes, 0, allSizes, availableSizes.length,
2007                     highResAvailableSizes.length);
2008             availableSizes = allSizes;
2009         }
2010         if (VERBOSE) Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes));
2011         return availableSizes;
2012     }
2013 
2014     /**
2015      * Size comparator that compares sizes by the number of pixels they cover.
2016      *
2017      * <p>If the areas of two sizes are the same, compare the widths.</p>
2018      */
2019     public static class SizeComparator implements Comparator<Size> {
2020         @Override
2021         public int compare(Size lhs, Size rhs) {
2022             return CameraUtils
2023                     .compareSizes(lhs.getWidth(), lhs.getHeight(), rhs.getWidth(), rhs.getHeight());
2024         }
2025     }
2026 
2027     /**
2028      * Get sorted size list in descending order. Remove the sizes larger than
2029      * the bound. If the bound is null, don't do the size bound filtering.
2030      */
2031     static public List<Size> getSupportedPreviewSizes(String cameraId,
2032             CameraManager cameraManager, Size bound) throws CameraAccessException {
2033 
2034         Size[] rawSizes = getSupportedSizeForClass(android.view.SurfaceHolder.class, cameraId,
2035                 cameraManager);
2036         assertArrayNotEmpty(rawSizes,
2037                 "Available sizes for SurfaceHolder class should not be empty");
2038         if (VERBOSE) {
2039             Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(rawSizes));
2040         }
2041 
2042         if (bound == null) {
2043             return getAscendingOrderSizes(Arrays.asList(rawSizes), /*ascending*/false);
2044         }
2045 
2046         List<Size> sizes = new ArrayList<Size>();
2047         for (Size sz: rawSizes) {
2048             if (sz.getWidth() <= bound.getWidth() && sz.getHeight() <= bound.getHeight()) {
2049                 sizes.add(sz);
2050             }
2051         }
2052         return getAscendingOrderSizes(sizes, /*ascending*/false);
2053     }
2054 
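    // Hedged usage sketch (illustrative): picking the largest preview size no larger than
    // 1080p; the bound is an arbitrary example value.
    //
    //     Size bound = new Size(1920, 1080);
    //     List<Size> previewSizes = getSupportedPreviewSizes(cameraId, manager, bound);
    //     Size largestFitting = previewSizes.get(0);   // the list is in descending order
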
2055     /**
2056      * Get a sorted list of sizes from a given size list.
2057      *
2058      * <p>
2059      * Sizes are compared by the area they cover; if the areas are the same,
2060      * the widths are compared.
2061      * </p>
2062      *
2063      * @param sizeList The input size list to be sorted
2064      * @param ascending True if the order is ascending, otherwise descending
2065      * @return The ordered list of sizes
2066      */
2067     static public List<Size> getAscendingOrderSizes(final List<Size> sizeList, boolean ascending) {
2068         if (sizeList == null) {
2069             throw new IllegalArgumentException("sizeList shouldn't be null");
2070         }
2071 
2072         Comparator<Size> comparator = new SizeComparator();
2073         List<Size> sortedSizes = new ArrayList<Size>();
2074         sortedSizes.addAll(sizeList);
2075         Collections.sort(sortedSizes, comparator);
2076         if (!ascending) {
2077             Collections.reverse(sortedSizes);
2078         }
2079 
2080         return sortedSizes;
2081     }
2082     /**
2083      * Get sorted (descending order) size list for given format. Remove the sizes larger than
2084      * the bound. If the bound is null, don't do the size bound filtering.
2085      */
2086     static public List<Size> getSortedSizesForFormat(String cameraId,
2087             CameraManager cameraManager, int format, Size bound) throws CameraAccessException {
2088         return getSortedSizesForFormat(cameraId, cameraManager, format, /*maxResolution*/false,
2089                 bound);
2090     }
2091 
2092     /**
2093      * Get sorted (descending order) size list for given format (with an option to get sizes from
2094      * the maximum resolution stream configuration map). Remove the sizes larger than
2095      * the bound. If the bound is null, don't do the size bound filtering.
2096      */
2097     static public List<Size> getSortedSizesForFormat(String cameraId,
2098             CameraManager cameraManager, int format, boolean maxResolution, Size bound)
2099             throws CameraAccessException {
2100         Comparator<Size> comparator = new SizeComparator();
2101         Size[] sizes = getSupportedSizeForFormat(format, cameraId, cameraManager, maxResolution);
2102         List<Size> sortedSizes = null;
2103         if (bound != null) {
2104             sortedSizes = new ArrayList<Size>(/*capacity*/1);
2105             for (Size sz : sizes) {
2106                 if (comparator.compare(sz, bound) <= 0) {
2107                     sortedSizes.add(sz);
2108                 }
2109             }
2110         } else {
2111             sortedSizes = Arrays.asList(sizes);
2112         }
2113         assertTrue("Supported size list should have at least one element",
2114                 sortedSizes.size() > 0);
2115 
2116         Collections.sort(sortedSizes, comparator);
2117         // Make it in descending order.
2118         Collections.reverse(sortedSizes);
2119         return sortedSizes;
2120     }
2121 
2122     /**
2123      * Get supported video size list for a given camera device.
2124      *
2125      * <p>
2126      * Filter out the sizes that are larger than the bound. If the bound is
2127      * null, don't do the size bound filtering.
2128      * </p>
2129      */
2130     static public List<Size> getSupportedVideoSizes(String cameraId,
2131             CameraManager cameraManager, Size bound) throws CameraAccessException {
2132 
2133         Size[] rawSizes = getSupportedSizeForClass(android.media.MediaRecorder.class,
2134                 cameraId, cameraManager);
2135         assertArrayNotEmpty(rawSizes,
2136                 "Available sizes for MediaRecorder class should not be empty");
2137         if (VERBOSE) {
2138             Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(rawSizes));
2139         }
2140 
2141         if (bound == null) {
2142             return getAscendingOrderSizes(Arrays.asList(rawSizes), /*ascending*/false);
2143         }
2144 
2145         List<Size> sizes = new ArrayList<Size>();
2146         for (Size sz: rawSizes) {
2147             if (sz.getWidth() <= bound.getWidth() && sz.getHeight() <= bound.getHeight()) {
2148                 sizes.add(sz);
2149             }
2150         }
2151         return getAscendingOrderSizes(sizes, /*ascending*/false);
2152     }
2153 
    /**
     * Get supported still capture (JPEG) size list (descending order) for a given camera device.
     *
     * <p>
     * Filter out the sizes that are larger than the bound. If the bound is
     * null, don't do the size bound filtering.
     * </p>
     */
    static public List<Size> getSupportedStillSizes(String cameraId,
            CameraManager cameraManager, Size bound) throws CameraAccessException {
        return getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.JPEG, bound);
    }

    /**
     * Get supported HEIC size list (descending order) for a given camera device.
     *
     * <p>
     * Filter out the sizes that are larger than the bound. If the bound is
     * null, don't do the size bound filtering.
     * </p>
     */
    static public List<Size> getSupportedHeicSizes(String cameraId,
            CameraManager cameraManager, Size bound) throws CameraAccessException {
        return getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.HEIC, bound);
    }

    /**
     * Get the minimum supported preview size for a camera device.
     */
    static public Size getMinPreviewSize(String cameraId, CameraManager cameraManager)
            throws CameraAccessException {
        List<Size> sizes = getSupportedPreviewSizes(cameraId, cameraManager, null);
        return sizes.get(sizes.size() - 1);
    }

    /**
     * Get max supported preview size for a camera device.
     */
    static public Size getMaxPreviewSize(String cameraId, CameraManager cameraManager)
            throws CameraAccessException {
        return getMaxPreviewSize(cameraId, cameraManager, /*bound*/null);
    }

    /**
     * Get max preview size for a camera device in the supported sizes that are no larger
     * than the bound.
     */
    static public Size getMaxPreviewSize(String cameraId, CameraManager cameraManager, Size bound)
            throws CameraAccessException {
        List<Size> sizes = getSupportedPreviewSizes(cameraId, cameraManager, bound);
        return sizes.get(0);
    }

    /**
     * Get max depth size for a camera device.
     */
    static public Size getMaxDepthSize(String cameraId, CameraManager cameraManager)
            throws CameraAccessException {
        List<Size> sizes = getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.DEPTH16,
                /*bound*/ null);
        return sizes.get(0);
    }

    /**
     * Return the smaller of two sizes, compared by pixel area.
     *
     * @param a first size
     * @param b second size
     *
     * @return the smaller size
     *
     * @throws IllegalArgumentException if either param is null.
     */
    @NonNull public static Size getMinSize(Size a, Size b) {
        if (a == null || b == null) {
            throw new IllegalArgumentException("sizes must not be null");
        }
        if (a.getWidth() * a.getHeight() < b.getWidth() * b.getHeight()) {
            return a;
        }
        return b;
    }

    /**
     * Get the largest size by area.
     *
     * @param sizes an array of sizes, must have at least 1 element
     *
     * @return Largest Size
     *
     * @throws IllegalArgumentException if sizes was null or had 0 elements
     */
    public static Size getMaxSize(Size... sizes) {
        if (sizes == null || sizes.length == 0) {
            throw new IllegalArgumentException("sizes was empty");
        }

        Size sz = sizes[0];
        for (Size size : sizes) {
            if (size.getWidth() * size.getHeight() > sz.getWidth() * sz.getHeight()) {
                sz = size;
            }
        }

        return sz;
    }

    /**
     * Get the largest size by area within (less than) bound
     *
     * @param sizes an array of sizes, must have at least 1 element
     *
     * @return Largest Size. Null if no such size exists within bound.
     *
     * @throws IllegalArgumentException if sizes was null or had 0 elements, or bound is invalid.
     */
    public static Size getMaxSizeWithBound(Size[] sizes, int bound) {
        if (sizes == null || sizes.length == 0) {
            throw new IllegalArgumentException("sizes was empty");
        }
        if (bound <= 0) {
            throw new IllegalArgumentException("bound is invalid");
        }

        Size sz = null;
        for (Size size : sizes) {
            if (size.getWidth() * size.getHeight() >= bound) {
                continue;
            }

            if (sz == null ||
                    size.getWidth() * size.getHeight() > sz.getWidth() * sz.getHeight()) {
                sz = size;
            }
        }

        return sz;
    }

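    // Illustrative usage sketch (added for documentation; not part of the original helper class):
    // choosing the largest candidate size whose pixel count stays strictly below an area bound,
    // e.g. to keep output buffers under roughly 2 megapixels. The candidate array is an
    // assumption supplied by the caller.
    private static Size exampleLargestSizeUnderTwoMegapixels(Size[] candidateSizes) {
        final int maxPixelCount = 2_000_000; // hypothetical area bound (exclusive)
        Size picked = getMaxSizeWithBound(candidateSizes, maxPixelCount);
        // getMaxSizeWithBound returns null when no candidate is strictly below the bound;
        // fall back to the smallest candidate in that case.
        return (picked != null) ? picked
                : getAscendingOrderSizes(Arrays.asList(candidateSizes), /*ascending*/true).get(0);
    }
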
    /**
     * Returns true if the given {@code array} contains the given element.
     *
     * @param array {@code array} to check for {@code elem}
     * @param elem {@code elem} to test for
     * @return {@code true} if the given element is contained
     */
    public static boolean contains(int[] array, int elem) {
        if (array == null) return false;
        for (int i = 0; i < array.length; i++) {
            if (elem == array[i]) return true;
        }
        return false;
    }

    public static boolean contains(long[] array, long elem) {
        if (array == null) return false;
        for (int i = 0; i < array.length; i++) {
            if (elem == array[i]) return true;
        }
        return false;
    }

    /**
     * Get object array from byte array.
     *
     * @param array Input byte array to be converted
     * @return Byte object array converted from input byte array
     */
    public static Byte[] toObject(byte[] array) {
        return convertPrimitiveArrayToObjectArray(array, Byte.class);
    }

    /**
     * Get object array from int array.
     *
     * @param array Input int array to be converted
     * @return Integer object array converted from input int array
     */
    public static Integer[] toObject(int[] array) {
        return convertPrimitiveArrayToObjectArray(array, Integer.class);
    }

    /**
     * Get object array from float array.
     *
     * @param array Input float array to be converted
     * @return Float object array converted from input float array
     */
    public static Float[] toObject(float[] array) {
        return convertPrimitiveArrayToObjectArray(array, Float.class);
    }

    /**
     * Get object array from double array.
     *
     * @param array Input double array to be converted
     * @return Double object array converted from input double array
     */
    public static Double[] toObject(double[] array) {
        return convertPrimitiveArrayToObjectArray(array, Double.class);
    }

    /**
     * Convert a primitive input array into its object array version (e.g. from int[] to
     * Integer[]).
     *
     * @param array Input array object
     * @param wrapperClass The boxed class it converts to
     * @return Boxed version of primitive array
     */
    private static <T> T[] convertPrimitiveArrayToObjectArray(final Object array,
            final Class<T> wrapperClass) {
        // getLength does the null check and isArray check already.
        int arrayLength = Array.getLength(array);
        if (arrayLength == 0) {
            throw new IllegalArgumentException("Input array shouldn't be empty");
        }

        @SuppressWarnings("unchecked")
        final T[] result = (T[]) Array.newInstance(wrapperClass, arrayLength);
        for (int i = 0; i < arrayLength; i++) {
            Array.set(result, i, Array.get(array, i));
        }
        return result;
    }

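    // Illustrative usage sketch (added for documentation; not part of the original helper class):
    // boxing a primitive metadata value so it can be used with collection utilities. The
    // characteristics instance is assumed to come from the calling test; note that the boxing
    // helper above rejects null or empty input arrays.
    private static List<Integer> exampleAvailableCapabilitiesAsList(
            CameraCharacteristics characteristics) {
        int[] capabilities = characteristics.get(
                CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
        return Arrays.asList(toObject(capabilities));
    }
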
    /**
     * Update one 3A region in capture request builder if that region is supported. Do nothing
     * if the specified 3A region is not supported by camera device.
     * @param requestBuilder The request to be updated
     * @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
     * @param regions The 3A regions to be set
     * @param staticInfo static metadata characteristics
     */
    public static void update3aRegion(
            CaptureRequest.Builder requestBuilder, int algoIdx, MeteringRectangle[] regions,
            StaticMetadata staticInfo)
    {
        int maxRegions;
        CaptureRequest.Key<MeteringRectangle[]> key;

        if (regions == null || regions.length == 0 || staticInfo == null) {
            throw new IllegalArgumentException("Invalid input 3A region!");
        }

        switch (algoIdx) {
            case INDEX_ALGORITHM_AE:
                maxRegions = staticInfo.getAeMaxRegionsChecked();
                key = CaptureRequest.CONTROL_AE_REGIONS;
                break;
            case INDEX_ALGORITHM_AWB:
                maxRegions = staticInfo.getAwbMaxRegionsChecked();
                key = CaptureRequest.CONTROL_AWB_REGIONS;
                break;
            case INDEX_ALGORITHM_AF:
                maxRegions = staticInfo.getAfMaxRegionsChecked();
                key = CaptureRequest.CONTROL_AF_REGIONS;
                break;
            default:
                throw new IllegalArgumentException("Unknown 3A Algorithm!");
        }

        if (maxRegions >= regions.length) {
            requestBuilder.set(key, regions);
        }
    }

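    // Illustrative usage sketch (added for documentation; not part of the original helper class):
    // requesting a centered AE metering region covering the middle quarter of the active array.
    // The request builder, active array rectangle and static info are assumed to come from the
    // calling test.
    private static void exampleSetCenteredAeRegion(CaptureRequest.Builder requestBuilder,
            Rect activeArray, StaticMetadata staticInfo) {
        Rect centerQuarter = new Rect(
                activeArray.width() / 4, activeArray.height() / 4,
                activeArray.width() * 3 / 4, activeArray.height() * 3 / 4);
        MeteringRectangle[] aeRegions = new MeteringRectangle[] {
                new MeteringRectangle(centerQuarter, MeteringRectangle.METERING_WEIGHT_MAX) };
        // update3aRegion is a no-op if the device reports zero supported AE regions.
        update3aRegion(requestBuilder, INDEX_ALGORITHM_AE, aeRegions, staticInfo);
    }
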
    /**
     * Validate that one 3A region in the capture result equals the expected region, if that
     * region is supported. Do nothing if the specified 3A region is not supported by the camera
     * device.
     * @param result The capture result to be validated
     * @param partialResults The partial results to be validated
     * @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
     * @param expectRegions The 3A regions expected in capture result
     * @param scaleByZoomRatio whether to scale the error threshold by zoom ratio
     * @param staticInfo static metadata characteristics
     */
    public static void validate3aRegion(
            CaptureResult result, List<CaptureResult> partialResults, int algoIdx,
            MeteringRectangle[] expectRegions, boolean scaleByZoomRatio, StaticMetadata staticInfo)
    {
        // There are multiple cases where the result 3A region could be slightly different from
        // the request:
        // 1. Distortion correction.
        // 2. Adding a smaller 3A region in the test exposes that some existing devices' offset
        //    is larger than 1.
        // 3. Precision loss due to converting to HAL zoom ratio and back.
        // 4. Error magnification due to active array scale-up when the zoom ratio API is used.
        //
        // To handle all these scenarios, make the threshold larger, and scale the threshold based
        // on zoom ratio. The scaling factor should be relatively tight, and shouldn't be smaller
        // than 1x.
        final int maxCoordOffset = 5;
        int maxRegions;
        CaptureResult.Key<MeteringRectangle[]> key;
        MeteringRectangle[] actualRegion;

        switch (algoIdx) {
            case INDEX_ALGORITHM_AE:
                maxRegions = staticInfo.getAeMaxRegionsChecked();
                key = CaptureResult.CONTROL_AE_REGIONS;
                break;
            case INDEX_ALGORITHM_AWB:
                maxRegions = staticInfo.getAwbMaxRegionsChecked();
                key = CaptureResult.CONTROL_AWB_REGIONS;
                break;
            case INDEX_ALGORITHM_AF:
                maxRegions = staticInfo.getAfMaxRegionsChecked();
                key = CaptureResult.CONTROL_AF_REGIONS;
                break;
            default:
                throw new IllegalArgumentException("Unknown 3A Algorithm!");
        }

        int maxDist = maxCoordOffset;
        if (scaleByZoomRatio) {
            Float zoomRatio = result.get(CaptureResult.CONTROL_ZOOM_RATIO);
            for (CaptureResult partialResult : partialResults) {
                Float zoomRatioInPartial = partialResult.get(CaptureResult.CONTROL_ZOOM_RATIO);
                if (zoomRatioInPartial != null) {
                    assertEquals("CONTROL_ZOOM_RATIO in partial result must match"
                            + " that in final result", zoomRatio, zoomRatioInPartial);
                }
            }
            maxDist = (int) Math.ceil(maxDist * Math.max(zoomRatio / 2, 1.0f));
        }

        if (maxRegions > 0)
        {
            actualRegion = getValueNotNull(result, key);
            for (CaptureResult partialResult : partialResults) {
                MeteringRectangle[] actualRegionInPartial = partialResult.get(key);
                if (actualRegionInPartial != null) {
                    assertEquals("Key " + key.getName() + " in partial result must match"
                            + " that in final result", actualRegionInPartial, actualRegion);
                }
            }

            for (int i = 0; i < actualRegion.length; i++) {
                // If the expected region's metering weight is 0, allow the camera device
                // to override it.
                if (expectRegions[i].getMeteringWeight() == 0) {
                    continue;
                }

                Rect a = actualRegion[i].getRect();
                Rect e = expectRegions[i].getRect();

                if (VERBOSE) {
                    Log.v(TAG, "Actual region " + actualRegion[i].toString() +
                            ", expected region " + expectRegions[i].toString() +
                            ", maxDist " + maxDist);
                }
                assertTrue(
                    "Expected 3A regions: " + Arrays.toString(expectRegions) +
                    " are not close enough to the actual one: " + Arrays.toString(actualRegion),
                    maxDist >= Math.abs(a.left - e.left));

                assertTrue(
                    "Expected 3A regions: " + Arrays.toString(expectRegions) +
                    " are not close enough to the actual one: " + Arrays.toString(actualRegion),
                    maxDist >= Math.abs(a.right - e.right));

                assertTrue(
                    "Expected 3A regions: " + Arrays.toString(expectRegions) +
                    " are not close enough to the actual one: " + Arrays.toString(actualRegion),
                    maxDist >= Math.abs(a.top - e.top));

                assertTrue(
                    "Expected 3A regions: " + Arrays.toString(expectRegions) +
                    " are not close enough to the actual one: " + Arrays.toString(actualRegion),
                    maxDist >= Math.abs(a.bottom - e.bottom));
            }
        }
    }

    public static void validateImage(Image image, int width, int height, int format,
            String filePath) {
        validateImage(image, width, height, format, filePath, /*colorSpace*/ null);
    }

    /**
     * Validate image based on format and size.
     *
     * @param image The image to be validated.
     * @param width The image width.
     * @param height The image height.
     * @param format The image format.
     * @param filePath The debug dump file path; null if no dump to file is needed.
     * @param colorSpace The expected color space of the image, if desired (null otherwise).
     * @throws UnsupportedOperationException if calling with an unknown format
     */
    public static void validateImage(Image image, int width, int height, int format,
            String filePath, ColorSpace colorSpace) {
        checkImage(image, width, height, format, colorSpace);

        /**
         * TODO: validate timestamp:
         * 1. capture result timestamp against the image timestamp (need to
         * consider frame drops)
         * 2. timestamps should be monotonically increasing for different requests
         */
        if (VERBOSE) Log.v(TAG, "validating Image");
        byte[] data = getDataFromImage(image);
        assertTrue("Invalid image data", data != null && data.length > 0);

        switch (format) {
            // Clients must be able to process and handle depth jpeg images like any other
            // regular jpeg.
            case ImageFormat.DEPTH_JPEG:
            case ImageFormat.JPEG:
                validateJpegData(data, width, height, filePath, colorSpace);
                break;
            case ImageFormat.JPEG_R:
                validateJpegData(data, width, height, filePath, null /*colorSpace*/,
                        true /*gainMapPresent*/);
                break;
            case ImageFormat.YCBCR_P010:
                validateP010Data(data, width, height, format, image.getTimestamp(), filePath);
                break;
            case ImageFormat.YUV_420_888:
            case ImageFormat.YV12:
                validateYuvData(data, width, height, format, image.getTimestamp(), filePath);
                break;
            case ImageFormat.RAW_SENSOR:
                validateRaw16Data(data, width, height, format, image.getTimestamp(), filePath);
                break;
            case ImageFormat.DEPTH16:
                validateDepth16Data(data, width, height, format, image.getTimestamp(), filePath);
                break;
            case ImageFormat.DEPTH_POINT_CLOUD:
                validateDepthPointCloudData(data, width, height, format, image.getTimestamp(),
                        filePath);
                break;
            case ImageFormat.RAW_PRIVATE:
                validateRawPrivateData(data, width, height, image.getTimestamp(), filePath);
                break;
            case ImageFormat.Y8:
                validateY8Data(data, width, height, format, image.getTimestamp(), filePath);
                break;
            case ImageFormat.HEIC:
                validateHeicData(data, width, height, filePath);
                break;
            default:
                throw new UnsupportedOperationException("Unsupported format for validation: "
                        + format);
        }
    }

    public static class HandlerExecutor implements Executor {
        private final Handler mHandler;

        public HandlerExecutor(Handler handler) {
            assertNotNull("handler must be valid", handler);
            mHandler = handler;
        }

        @Override
        public void execute(Runnable runCmd) {
            mHandler.post(runCmd);
        }
    }

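    // Illustrative usage sketch (added for documentation; not part of the original helper class):
    // wrapping a Handler so it can be passed to camera2 APIs that accept an Executor, such as the
    // SessionConfiguration constructor. The outputs, handler and session callback are assumed to
    // come from the calling test.
    private static SessionConfiguration exampleSessionConfigWithHandlerExecutor(
            List<OutputConfiguration> outputs, Handler handler,
            CameraCaptureSession.StateCallback sessionCallback) {
        Executor executor = new HandlerExecutor(handler);
        return new SessionConfiguration(SessionConfiguration.SESSION_REGULAR, outputs,
                executor, sessionCallback);
    }
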
    /**
     * Provide a mock for {@link CameraDevice.StateCallback}.
     *
     * <p>Only useful because mockito can't mock {@link CameraDevice.StateCallback} which is an
     * abstract class.</p>
     *
     * <p>
     * Use this instead of other classes when needing to verify interactions, since
     * trying to spy on {@link BlockingStateCallback} (or others) will cause unnecessary extra
     * interactions which will cause false test failures.
     * </p>
     *
     */
    public static class MockStateCallback extends CameraDevice.StateCallback {

        @Override
        public void onOpened(CameraDevice camera) {
        }

        @Override
        public void onDisconnected(CameraDevice camera) {
        }

        @Override
        public void onError(CameraDevice camera, int error) {
        }

        private MockStateCallback() {}

        /**
         * Create a Mockito-ready mocked StateCallback.
         */
        public static MockStateCallback mock() {
            return Mockito.spy(new MockStateCallback());
        }
    }

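    // Illustrative usage sketch (added for documentation; not part of the original helper class):
    // opening a camera with a Mockito-ready callback and verifying the asynchronous onOpened()
    // interaction. The manager, camera id, handler and timeout are assumptions supplied by the
    // calling test.
    private static void exampleVerifyCameraOpened(CameraManager manager, String cameraId,
            Handler handler, long openTimeoutMs) throws CameraAccessException {
        MockStateCallback mockCallback = MockStateCallback.mock();
        manager.openCamera(cameraId, mockCallback, handler);
        // Mockito's timeout() waits up to openTimeoutMs for the callback to arrive.
        verify(mockCallback, timeout(openTimeoutMs)).onOpened(any(CameraDevice.class));
    }
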
    public static void validateJpegData(byte[] jpegData, int width, int height, String filePath) {
        validateJpegData(jpegData, width, height, filePath, /*colorSpace*/ null);
    }

    public static void validateJpegData(byte[] jpegData, int width, int height, String filePath,
            ColorSpace colorSpace) {
        validateJpegData(jpegData, width, height, filePath, colorSpace, false /*gainMapPresent*/);
    }

    public static void validateJpegData(byte[] jpegData, int width, int height, String filePath,
            ColorSpace colorSpace, boolean gainMapPresent) {
        BitmapFactory.Options bmpOptions = new BitmapFactory.Options();
        // DecodeBounds mode: only parse the frame header to get width/height;
        // it doesn't decode the pixels.
        bmpOptions.inJustDecodeBounds = true;
        BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length, bmpOptions);
        assertEquals(width, bmpOptions.outWidth);
        assertEquals(height, bmpOptions.outHeight);

        // Pixel decoding mode: decode the whole image and check that the image data
        // is decodable here.
        Bitmap bitmapImage = BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length);
        assertNotNull("Decoding jpeg failed", bitmapImage);
        if (colorSpace != null) {
            ColorSpace bitmapColorSpace = bitmapImage.getColorSpace();
            boolean matchingColorSpace = colorSpace.equals(bitmapColorSpace);
            if (!matchingColorSpace) {
                Log.e(TAG, "Expected color space:\n\t" + colorSpace);
                Log.e(TAG, "Bitmap color space:\n\t" + bitmapColorSpace);
            }
            assertTrue("Color space mismatch in decoded jpeg!", matchingColorSpace);
        }
        if (gainMapPresent) {
            Gainmap gainMap = bitmapImage.getGainmap();
            assertNotNull(gainMap);
            assertNotNull(gainMap.getGainmapContents());
        }
        if (DEBUG && filePath != null) {
            String fileName =
                    filePath + "/" + width + "x" + height + ".jpeg";
            dumpFile(fileName, jpegData);
        }
    }

    private static void validateYuvData(byte[] yuvData, int width, int height, int format,
            long ts, String filePath) {
        checkYuvFormat(format);
        if (VERBOSE) Log.v(TAG, "Validating YUV data");
        int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
        assertEquals("Yuv data doesn't match", expectedSize, yuvData.length);

        // TODO: Can add data validation for test pattern.

        if (DEBUG && filePath != null) {
            String fileName =
                    filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".yuv";
            dumpFile(fileName, yuvData);
        }
    }

    private static void validateP010Data(byte[] p010Data, int width, int height, int format,
            long ts, String filePath) {
        if (VERBOSE) Log.v(TAG, "Validating P010 data");
        // The P010 10 bit samples are stored in two bytes so the size needs to be adjusted
        // accordingly.
        int bytesPerPixelRounded = (ImageFormat.getBitsPerPixel(format) + 7) / 8;
        int expectedSize = width * height * bytesPerPixelRounded;
        assertEquals("P010 data doesn't match", expectedSize, p010Data.length);

        if (DEBUG && filePath != null) {
            String fileName =
                    filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".p010";
            dumpFile(fileName, p010Data);
        }
    }

    private static void validateRaw16Data(byte[] rawData, int width, int height, int format,
            long ts, String filePath) {
        if (VERBOSE) Log.v(TAG, "Validating raw data");
        int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
        assertEquals("Raw data doesn't match", expectedSize, rawData.length);

        // TODO: Can add data validation for test pattern.

        if (DEBUG && filePath != null) {
            String fileName =
                    filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".raw16";
            dumpFile(fileName, rawData);
        }
    }

    private static void validateY8Data(byte[] rawData, int width, int height, int format,
            long ts, String filePath) {
        if (VERBOSE) Log.v(TAG, "Validating Y8 data");
        int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
        assertEquals("Y8 data doesn't match", expectedSize, rawData.length);

        // TODO: Can add data validation for test pattern.

        if (DEBUG && filePath != null) {
            String fileName =
                    filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".y8";
            dumpFile(fileName, rawData);
        }
    }

    private static void validateRawPrivateData(byte[] rawData, int width, int height,
            long ts, String filePath) {
        if (VERBOSE) Log.v(TAG, "Validating private raw data");
        // Expect each RAW pixel to occupy at least one byte and no more than 30 bytes.
        int expectedSizeMin = width * height;
        int expectedSizeMax = width * height * 30;

        assertTrue("Opaque RAW size " + rawData.length + " out of normal bound [" +
                expectedSizeMin + "," + expectedSizeMax + "]",
                expectedSizeMin <= rawData.length && rawData.length <= expectedSizeMax);

        if (DEBUG && filePath != null) {
            String fileName =
                    filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".rawPriv";
            dumpFile(fileName, rawData);
        }
    }

    private static void validateDepth16Data(byte[] depthData, int width, int height, int format,
            long ts, String filePath) {
        if (VERBOSE) Log.v(TAG, "Validating depth16 data");
        int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
        assertEquals("Depth data doesn't match", expectedSize, depthData.length);

        if (DEBUG && filePath != null) {
            String fileName =
                    filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".depth16";
            dumpFile(fileName, depthData);
        }
    }

    private static void validateDepthPointCloudData(byte[] depthData, int width, int height,
            int format, long ts, String filePath) {
        if (VERBOSE) Log.v(TAG, "Validating depth point cloud data");

        // Can't validate size since it is variable.

        if (DEBUG && filePath != null) {
            String fileName =
                    filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".depth_point_cloud";
            dumpFile(fileName, depthData);
        }
    }

    private static void validateHeicData(byte[] heicData, int width, int height, String filePath) {
        BitmapFactory.Options bmpOptions = new BitmapFactory.Options();
        // DecodeBounds mode: only parse the frame header to get width/height;
        // it doesn't decode the pixels.
        bmpOptions.inJustDecodeBounds = true;
        BitmapFactory.decodeByteArray(heicData, 0, heicData.length, bmpOptions);
        assertEquals(width, bmpOptions.outWidth);
        assertEquals(height, bmpOptions.outHeight);

        // Pixel decoding mode: decode the whole image and check that the image data
        // is decodable here.
        assertNotNull("Decoding heic failed",
                BitmapFactory.decodeByteArray(heicData, 0, heicData.length));
        if (DEBUG && filePath != null) {
            String fileName =
                    filePath + "/" + width + "x" + height + ".heic";
            dumpFile(fileName, heicData);
        }
    }

    public static <T> T getValueNotNull(CaptureResult result, CaptureResult.Key<T> key) {
        if (result == null) {
            throw new IllegalArgumentException("Result must not be null");
        }

        T value = result.get(key);
        assertNotNull("Value of Key " + key.getName() + " shouldn't be null", value);
        return value;
    }

    public static <T> T getValueNotNull(CameraCharacteristics characteristics,
            CameraCharacteristics.Key<T> key) {
        if (characteristics == null) {
            throw new IllegalArgumentException("Camera characteristics must not be null");
        }

        T value = characteristics.get(key);
        assertNotNull("Value of Key " + key.getName() + " shouldn't be null", value);
        return value;
    }

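    // Illustrative usage sketch (added for documentation; not part of the original helper class):
    // reading mandatory metadata with a non-null guarantee, which fails the test with a
    // descriptive assertion instead of a NullPointerException. The result and characteristics
    // are assumed to be supplied by the calling test.
    private static void exampleReadMandatoryMetadata(CaptureResult result,
            CameraCharacteristics characteristics) {
        Long timestamp = getValueNotNull(result, CaptureResult.SENSOR_TIMESTAMP);
        Rect activeArray = getValueNotNull(characteristics,
                CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
        if (VERBOSE) {
            Log.v(TAG, "timestamp: " + timestamp + ", active array: " + activeArray);
        }
    }
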
    /**
     * Get a crop region for a given zoom factor and center position.
     * <p>
     * The center position is a normalized position in the range of [0, 1.0], where
     * (0, 0) represents the top left corner and (1.0, 1.0) represents the bottom right
     * corner. The center position can limit the effective minimal zoom
     * factor; for example, if the center position is (0.75, 0.75), the
     * effective minimal zoom factor becomes 2.0. If the requested zoom factor
     * is smaller than 2.0, a crop region with a 2.0 zoom factor will be returned.
     * </p>
     * <p>
     * The aspect ratio of the crop region is kept the same as the aspect
     * ratio of the active array.
     * </p>
     *
     * @param zoomFactor The zoom factor to generate the crop region, it must be
     *            >= 1.0
     * @param center The normalized zoom center point that is in the range of [0, 1].
     * @param maxZoom The max zoom factor supported by this device.
     * @param activeArray The active array size of this device.
     * @return crop region for the given normalized center and zoom factor.
     */
    public static Rect getCropRegionForZoom(float zoomFactor, final PointF center,
            final float maxZoom, final Rect activeArray) {
        if (zoomFactor < 1.0) {
            throw new IllegalArgumentException("zoom factor " + zoomFactor + " should be >= 1.0");
        }
        if (center.x > 1.0 || center.x < 0) {
            throw new IllegalArgumentException("center.x " + center.x
                    + " should be in range of [0, 1.0]");
        }
        if (center.y > 1.0 || center.y < 0) {
            throw new IllegalArgumentException("center.y " + center.y
                    + " should be in range of [0, 1.0]");
        }
        if (maxZoom < 1.0) {
            throw new IllegalArgumentException("max zoom factor " + maxZoom + " should be >= 1.0");
        }
        if (activeArray == null) {
            throw new IllegalArgumentException("activeArray must not be null");
        }

        float minCenterLength = Math.min(Math.min(center.x, 1.0f - center.x),
                Math.min(center.y, 1.0f - center.y));
        float minEffectiveZoom = 0.5f / minCenterLength;
        if (minEffectiveZoom > maxZoom) {
            throw new IllegalArgumentException("Requested center " + center.toString() +
                    " has minimal zoomable factor " + minEffectiveZoom + ", which exceeds max"
                            + " zoom factor " + maxZoom);
        }

        if (zoomFactor < minEffectiveZoom) {
            Log.w(TAG, "Requested zoomFactor " + zoomFactor + " < minimal zoomable factor "
                    + minEffectiveZoom + ". It will be overwritten by " + minEffectiveZoom);
            zoomFactor = minEffectiveZoom;
        }

        int cropCenterX = (int) (activeArray.width() * center.x);
        int cropCenterY = (int) (activeArray.height() * center.y);
        int cropWidth = (int) (activeArray.width() / zoomFactor);
        int cropHeight = (int) (activeArray.height() / zoomFactor);

        return new Rect(
                /*left*/cropCenterX - cropWidth / 2,
                /*top*/cropCenterY - cropHeight / 2,
                /*right*/ cropCenterX + cropWidth / 2,
                /*bottom*/cropCenterY + cropHeight / 2);
    }

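    // Illustrative usage sketch (added for documentation; not part of the original helper class):
    // a 2x zoom centered at the middle of a hypothetical 4000x3000 active array yields a
    // 2000x1500 crop region centered at (2000, 1500), i.e. Rect(1000, 750, 3000, 2250).
    private static Rect exampleCenteredTwoTimesZoomCrop() {
        Rect activeArray = new Rect(0, 0, 4000, 3000); // hypothetical active array
        return getCropRegionForZoom(/*zoomFactor*/2.0f, new PointF(0.5f, 0.5f),
                /*maxZoom*/4.0f, activeArray);
    }
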
    /**
     * Get AeAvailableTargetFpsRanges and sort them in descending order by max fps
     *
     * @param staticInfo camera static metadata
     * @return AeAvailableTargetFpsRanges in descending order by max fps
     */
    public static Range<Integer>[] getDescendingTargetFpsRanges(StaticMetadata staticInfo) {
        Range<Integer>[] fpsRanges = staticInfo.getAeAvailableTargetFpsRangesChecked();
        Arrays.sort(fpsRanges, new Comparator<Range<Integer>>() {
            public int compare(Range<Integer> r1, Range<Integer> r2) {
                return r2.getUpper() - r1.getUpper();
            }
        });
        return fpsRanges;
    }

    /**
     * Get AeAvailableTargetFpsRanges with max fps not exceeding 30
     *
     * @param staticInfo camera static metadata
     * @return AeAvailableTargetFpsRanges with max fps not exceeding 30
     */
    public static List<Range<Integer>> getTargetFpsRangesUpTo30(StaticMetadata staticInfo) {
        Range<Integer>[] fpsRanges = staticInfo.getAeAvailableTargetFpsRangesChecked();
        ArrayList<Range<Integer>> fpsRangesUpTo30 = new ArrayList<Range<Integer>>();
        for (Range<Integer> fpsRange : fpsRanges) {
            if (fpsRange.getUpper() <= 30) {
                fpsRangesUpTo30.add(fpsRange);
            }
        }
        return fpsRangesUpTo30;
    }

    /**
     * Get AeAvailableTargetFpsRanges with max fps greater than 30
     *
     * @param staticInfo camera static metadata
     * @return AeAvailableTargetFpsRanges with max fps greater than 30
     */
    public static List<Range<Integer>> getTargetFpsRangesGreaterThan30(StaticMetadata staticInfo) {
        Range<Integer>[] fpsRanges = staticInfo.getAeAvailableTargetFpsRangesChecked();
        ArrayList<Range<Integer>> fpsRangesGreaterThan30 = new ArrayList<Range<Integer>>();
        for (Range<Integer> fpsRange : fpsRanges) {
            if (fpsRange.getUpper() > 30) {
                fpsRangesGreaterThan30.add(fpsRange);
            }
        }
        return fpsRangesGreaterThan30;
    }

    /**
     * Calculate output 3A region from the intersection of input 3A region and cropped region.
     *
     * @param requestRegions The input 3A regions
     * @param cropRect The cropped region
     * @return expected 3A regions output in capture result
     */
    public static MeteringRectangle[] getExpectedOutputRegion(
            MeteringRectangle[] requestRegions, Rect cropRect) {
        MeteringRectangle[] resultRegions = new MeteringRectangle[requestRegions.length];
        for (int i = 0; i < requestRegions.length; i++) {
            Rect requestRect = requestRegions[i].getRect();
            Rect resultRect = new Rect();
            boolean intersect = resultRect.setIntersect(requestRect, cropRect);
            resultRegions[i] = new MeteringRectangle(
                    resultRect,
                    intersect ? requestRegions[i].getMeteringWeight() : 0);
        }
        return resultRegions;
    }

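    // Illustrative usage sketch (added for documentation; not part of the original helper class):
    // clipping a requested AE region against a zoom crop region to derive the region expected in
    // the capture result; a request that falls completely outside the crop gets weight 0.
    private static MeteringRectangle[] exampleExpectedAeRegionsAfterCrop(Rect cropRegion) {
        MeteringRectangle[] requested = new MeteringRectangle[] {
                new MeteringRectangle(new Rect(0, 0, 100, 100),
                        MeteringRectangle.METERING_WEIGHT_MAX) };
        return getExpectedOutputRegion(requested, cropRegion);
    }
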
    /**
     * Copy source image data to destination image.
     *
     * @param src The source image to be copied from.
     * @param dst The destination image to be copied to.
     * @throws IllegalArgumentException If the source and destination images have
     *             different format, size, or one of the images is not copyable.
     */
    public static void imageCopy(Image src, Image dst) {
        if (src == null || dst == null) {
            throw new IllegalArgumentException("Images should be non-null");
        }
        if (src.getFormat() != dst.getFormat()) {
            throw new IllegalArgumentException("Src and dst images should have the same format");
        }
        if (src.getFormat() == ImageFormat.PRIVATE ||
                dst.getFormat() == ImageFormat.PRIVATE) {
            throw new IllegalArgumentException("PRIVATE format images are not copyable");
        }

        Size srcSize = new Size(src.getWidth(), src.getHeight());
        Size dstSize = new Size(dst.getWidth(), dst.getHeight());
        if (!srcSize.equals(dstSize)) {
            throw new IllegalArgumentException("source image size " + srcSize + " is different"
                    + " from destination image size " + dstSize);
        }

        // TODO: check the owner of the dst image, it must be from ImageWriter, other sources may
        // not be writable. Maybe we should add an isWritable() method in the Image class.

        Plane[] srcPlanes = src.getPlanes();
        Plane[] dstPlanes = dst.getPlanes();
        ByteBuffer srcBuffer = null;
        ByteBuffer dstBuffer = null;
        for (int i = 0; i < srcPlanes.length; i++) {
            srcBuffer = srcPlanes[i].getBuffer();
            dstBuffer = dstPlanes[i].getBuffer();
            int srcPos = srcBuffer.position();
            srcBuffer.rewind();
            dstBuffer.rewind();
            int srcRowStride = srcPlanes[i].getRowStride();
            int dstRowStride = dstPlanes[i].getRowStride();
            int srcPixStride = srcPlanes[i].getPixelStride();
            int dstPixStride = dstPlanes[i].getPixelStride();

            if (srcPixStride > 2 || dstPixStride > 2) {
                throw new IllegalArgumentException("source pixel stride " + srcPixStride +
                        " with destination pixel stride " + dstPixStride +
                        " is not supported");
            }

            if (srcRowStride == dstRowStride && srcPixStride == dstPixStride &&
                    srcPixStride == 1) {
                // Fast path, just copy the content of the byteBuffer all together.
                dstBuffer.put(srcBuffer);
            } else {
                Size effectivePlaneSize = getEffectivePlaneSizeForImage(src, i);
                int srcRowByteCount = srcRowStride;
                int dstRowByteCount = dstRowStride;
                byte[] srcDataRow = new byte[Math.max(srcRowStride, dstRowStride)];

                if (srcPixStride == dstPixStride && srcPixStride == 1) {
                    // Row by row copy case
                    for (int row = 0; row < effectivePlaneSize.getHeight(); row++) {
                        if (row == effectivePlaneSize.getHeight() - 1) {
                            // Special case for interleaved planes: need to handle the last row
                            // carefully to avoid memory corruption. Check if we have enough bytes
                            // to copy.
                            srcRowByteCount = Math.min(srcRowByteCount, srcBuffer.remaining());
                            dstRowByteCount = Math.min(dstRowByteCount, dstBuffer.remaining());
                        }
                        srcBuffer.get(srcDataRow, /*offset*/0, srcRowByteCount);
                        dstBuffer.put(srcDataRow, /*offset*/0, dstRowByteCount);
                    }
                } else {
                    // Row by row per pixel copy case
                    byte[] dstDataRow = new byte[dstRowByteCount];
                    for (int row = 0; row < effectivePlaneSize.getHeight(); row++) {
                        if (row == effectivePlaneSize.getHeight() - 1) {
                            // Special case for interleaved planes: need to handle the last row
                            // carefully to avoid memory corruption. Check if we have enough bytes
                            // to copy.
                            int remainingBytes = srcBuffer.remaining();
                            if (srcRowByteCount > remainingBytes) {
                                srcRowByteCount = remainingBytes;
                            }
                            remainingBytes = dstBuffer.remaining();
                            if (dstRowByteCount > remainingBytes) {
                                dstRowByteCount = remainingBytes;
                            }
                        }
                        srcBuffer.get(srcDataRow, /*offset*/0, srcRowByteCount);
                        int pos = dstBuffer.position();
                        dstBuffer.get(dstDataRow, /*offset*/0, dstRowByteCount);
                        dstBuffer.position(pos);
                        for (int x = 0; x < effectivePlaneSize.getWidth(); x++) {
                            dstDataRow[x * dstPixStride] = srcDataRow[x * srcPixStride];
                        }
                        dstBuffer.put(dstDataRow, /*offset*/0, dstRowByteCount);
                    }
                }
            }
            srcBuffer.position(srcPos);
            dstBuffer.rewind();
        }
    }

    private static Size getEffectivePlaneSizeForImage(Image image, int planeIdx) {
        switch (image.getFormat()) {
            case ImageFormat.YUV_420_888:
                if (planeIdx == 0) {
                    return new Size(image.getWidth(), image.getHeight());
                } else {
                    return new Size(image.getWidth() / 2, image.getHeight() / 2);
                }
            case ImageFormat.JPEG:
            case ImageFormat.RAW_SENSOR:
            case ImageFormat.RAW10:
            case ImageFormat.RAW12:
            case ImageFormat.DEPTH16:
                return new Size(image.getWidth(), image.getHeight());
            case ImageFormat.PRIVATE:
                return new Size(0, 0);
            default:
                throw new UnsupportedOperationException(
                        String.format("Invalid image format %d", image.getFormat()));
        }
    }

    /**
     * <p>
     * Checks whether the two images are strongly equal.
     * </p>
     * <p>
     * Two images are strongly equal if and only if the data, formats, sizes,
     * and timestamps are the same. For {@link ImageFormat#PRIVATE PRIVATE} format
     * images, the image data is not accessible, thus the data comparison is
     * effectively skipped as the number of planes is zero.
     * </p>
     * <p>
     * Note that this method compares the pixel data even outside of the crop
     * region, which may not be necessary for the general use case.
     * </p>
     *
     * @param lhsImg First image to be compared.
     * @param rhsImg Second image to be compared.
     * @return true if the two images are equal, false otherwise.
     * @throws IllegalArgumentException If either image is null.
     */
    public static boolean isImageStronglyEqual(Image lhsImg, Image rhsImg) {
        if (lhsImg == null || rhsImg == null) {
            throw new IllegalArgumentException("Images should be non-null");
        }

        if (lhsImg.getFormat() != rhsImg.getFormat()) {
            Log.i(TAG, "lhsImg format " + lhsImg.getFormat() + " is different from rhsImg format "
                    + rhsImg.getFormat());
            return false;
        }

        if (lhsImg.getWidth() != rhsImg.getWidth()) {
            Log.i(TAG, "lhsImg width " + lhsImg.getWidth() + " is different from rhsImg width "
                    + rhsImg.getWidth());
            return false;
        }

        if (lhsImg.getHeight() != rhsImg.getHeight()) {
            Log.i(TAG, "lhsImg height " + lhsImg.getHeight() + " is different from rhsImg height "
                    + rhsImg.getHeight());
            return false;
        }

        if (lhsImg.getTimestamp() != rhsImg.getTimestamp()) {
            Log.i(TAG, "lhsImg timestamp " + lhsImg.getTimestamp()
                    + " is different from rhsImg timestamp " + rhsImg.getTimestamp());
            return false;
        }

        if (!lhsImg.getCropRect().equals(rhsImg.getCropRect())) {
            Log.i(TAG, "lhsImg crop rect " + lhsImg.getCropRect()
                    + " is different from rhsImg crop rect " + rhsImg.getCropRect());
            return false;
        }

        // Compare data inside of the image.
        Plane[] lhsPlanes = lhsImg.getPlanes();
        Plane[] rhsPlanes = rhsImg.getPlanes();
        ByteBuffer lhsBuffer = null;
        ByteBuffer rhsBuffer = null;
        for (int i = 0; i < lhsPlanes.length; i++) {
            lhsBuffer = lhsPlanes[i].getBuffer();
            rhsBuffer = rhsPlanes[i].getBuffer();
            lhsBuffer.rewind();
            rhsBuffer.rewind();
            // Special case for YUV420_888 buffer with different layout or
            // potentially differently interleaved U/V planes.
            if (lhsImg.getFormat() == ImageFormat.YUV_420_888 &&
                    (lhsPlanes[i].getPixelStride() != rhsPlanes[i].getPixelStride() ||
                     lhsPlanes[i].getRowStride() != rhsPlanes[i].getRowStride() ||
                     (lhsPlanes[i].getPixelStride() != 1))) {
                int width = getEffectivePlaneSizeForImage(lhsImg, i).getWidth();
                int height = getEffectivePlaneSizeForImage(lhsImg, i).getHeight();
                int rowSizeL = lhsPlanes[i].getRowStride();
                int rowSizeR = rhsPlanes[i].getRowStride();
                byte[] lhsRow = new byte[rowSizeL];
                byte[] rhsRow = new byte[rowSizeR];
                int pixStrideL = lhsPlanes[i].getPixelStride();
                int pixStrideR = rhsPlanes[i].getPixelStride();
                for (int r = 0; r < height; r++) {
                    if (r == height - 1) {
                        rowSizeL = lhsBuffer.remaining();
                        rowSizeR = rhsBuffer.remaining();
                    }
                    lhsBuffer.get(lhsRow, /*offset*/0, rowSizeL);
                    rhsBuffer.get(rhsRow, /*offset*/0, rowSizeR);
                    for (int c = 0; c < width; c++) {
                        if (lhsRow[c * pixStrideL] != rhsRow[c * pixStrideR]) {
                            Log.i(TAG, String.format(
                                    "byte buffers for plane %d row %d col %d don't match.",
                                    i, r, c));
                            return false;
                        }
                    }
                }
            } else {
                // Compare entire buffer directly
                if (!lhsBuffer.equals(rhsBuffer)) {
                    Log.i(TAG, "byte buffers for plane " + i + " don't match.");
                    return false;
                }
            }
        }

        return true;
    }

    /**
     * Set jpeg related keys in a capture request builder.
     *
     * @param builder The capture request builder to set the keys in.
     * @param exifData The exif data to set.
     * @param thumbnailSize The thumbnail size to set.
     * @param collector The camera error collector to collect errors.
     */
    public static void setJpegKeys(CaptureRequest.Builder builder, ExifTestData exifData,
            Size thumbnailSize, CameraErrorCollector collector) {
        builder.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, thumbnailSize);
        builder.set(CaptureRequest.JPEG_GPS_LOCATION, exifData.gpsLocation);
        builder.set(CaptureRequest.JPEG_ORIENTATION, exifData.jpegOrientation);
        builder.set(CaptureRequest.JPEG_QUALITY, exifData.jpegQuality);
        builder.set(CaptureRequest.JPEG_THUMBNAIL_QUALITY,
                exifData.thumbnailQuality);

        // Validate request set and get.
        collector.expectEquals("JPEG thumbnail size request set and get should match",
                thumbnailSize, builder.get(CaptureRequest.JPEG_THUMBNAIL_SIZE));
        collector.expectTrue("GPS locations request set and get should match.",
                areGpsFieldsEqual(exifData.gpsLocation,
                builder.get(CaptureRequest.JPEG_GPS_LOCATION)));
        collector.expectEquals("JPEG orientation request set and get should match",
                exifData.jpegOrientation,
                builder.get(CaptureRequest.JPEG_ORIENTATION));
        collector.expectEquals("JPEG quality request set and get should match",
                exifData.jpegQuality, builder.get(CaptureRequest.JPEG_QUALITY));
        collector.expectEquals("JPEG thumbnail quality request set and get should match",
                exifData.thumbnailQuality,
                builder.get(CaptureRequest.JPEG_THUMBNAIL_QUALITY));
    }

3257     /**
3258      * Simple validation of JPEG image size and format.
3259      * <p>
3260      * Only validate the image object basic correctness. It is fast, but doesn't actually
3261      * check the buffer data. Assert is used here as it make no sense to
3262      * continue the test if the jpeg image captured has some serious failures.
3263      * </p>
3264      *
3265      * @param image The captured JPEG/HEIC image
3266      * @param expectedSize Expected capture JEPG/HEIC size
3267      * @param format JPEG/HEIC image format
3268      */
basicValidateBlobImage(Image image, Size expectedSize, int format)3269     public static void basicValidateBlobImage(Image image, Size expectedSize, int format) {
3270         Size imageSz = new Size(image.getWidth(), image.getHeight());
3271         assertTrue(
3272                 String.format("Image size doesn't match (expected %s, actual %s) ",
3273                         expectedSize.toString(), imageSz.toString()), expectedSize.equals(imageSz));
3274         assertEquals("Image format should be " + ((format == ImageFormat.HEIC) ? "HEIC" : "JPEG"),
3275                 format, image.getFormat());
3276         assertNotNull("Image plane shouldn't be null", image.getPlanes());
3277         assertEquals("Image plane number should be 1", 1, image.getPlanes().length);
3278 
3279         // Jpeg/Heic decoding validate was done in ImageReaderTest,
3280         // no need to duplicate the test here.
3281     }
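
    // Illustrative sketch, not part of the original utilities: validating a captured JPEG frame
    // pulled from an ImageReader. The example method name is an assumption; the reader and the
    // expected capture size are assumed to have been configured by the caller.
    private static void exampleBasicValidateBlobImageUsage(ImageReader jpegReader,
            Size captureSize) {
        Image image = jpegReader.acquireNextImage();
        assertNotNull("No JPEG frame available from the reader", image);
        try {
            // Fails the test immediately if the size, format or plane count is wrong.
            basicValidateBlobImage(image, captureSize, ImageFormat.JPEG);
        } finally {
            image.close();
        }
    }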
3282 
3283     /**
3284      * Verify that the EXIF and JPEG related keys in a capture result are as expected:
3285      * - Capture request get values are the same as the values that were set.
3286      * - The capture result's EXIF data is the same as was set by
3287      *   the capture request.
3288      * - New tags in the result set by the camera service are
3289      *   present and semantically correct.
3290      *
3291      * @param image The output JPEG/HEIC image to verify.
3292      * @param captureResult The capture result to verify.
3293      * @param expectedSize The expected JPEG/HEIC size.
3294      * @param expectedThumbnailSize The expected thumbnail size.
3295      * @param expectedExifData The expected EXIF data
3296      * @param staticInfo The static metadata for the camera device.
3297      * @param allStaticInfo The camera Id to static metadata map for all cameras.
3298      * @param debugFileNameBase The debug file path prefix used to dump the jpeg/heic to.
3299      * @param collector The camera error collector to collect errors.
3300      * @param format JPEG/HEIC format
3301      */
3302     public static void verifyJpegKeys(Image image, CaptureResult captureResult, Size expectedSize,
3303             Size expectedThumbnailSize, ExifTestData expectedExifData, StaticMetadata staticInfo,
3304             HashMap<String, StaticMetadata> allStaticInfo, CameraErrorCollector collector,
3305             String debugFileNameBase, int format) throws Exception {
3306 
3307         basicValidateBlobImage(image, expectedSize, format);
3308 
3309         byte[] blobBuffer = getDataFromImage(image);
3310         // Have to dump into a file to be able to use ExifInterface
3311         String filePostfix = (format == ImageFormat.HEIC ? ".heic" : ".jpeg");
3312         String blobFilename = debugFileNameBase + "/verifyJpegKeys" + filePostfix;
3313         dumpFile(blobFilename, blobBuffer);
3314         ExifInterface exif = new ExifInterface(blobFilename);
3315 
3316         if (expectedThumbnailSize.equals(new Size(0,0))) {
3317             collector.expectTrue("Jpeg shouldn't have thumbnail when thumbnail size is (0, 0)",
3318                     !exif.hasThumbnail());
3319         } else {
3320             collector.expectTrue("Jpeg must have thumbnail for thumbnail size " +
3321                     expectedThumbnailSize, exif.hasThumbnail());
3322         }
3323 
3324         // Validate capture result vs. request
3325         Size resultThumbnailSize = captureResult.get(CaptureResult.JPEG_THUMBNAIL_SIZE);
3326         int orientationTested = expectedExifData.jpegOrientation;
3327         // The legacy shim never rotates the thumbnail size
3328         if ((orientationTested == 90 || orientationTested == 270) &&
3329                 staticInfo.isHardwareLevelAtLeastLimited()) {
3330             int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
3331                     /*defaultValue*/-1);
3332             if (exifOrientation == ExifInterface.ORIENTATION_UNDEFINED) {
3333                 // Device physically rotated image+thumbnail data
3334                 // Expect thumbnail size to be also rotated
3335                 resultThumbnailSize = new Size(resultThumbnailSize.getHeight(),
3336                         resultThumbnailSize.getWidth());
3337             }
3338         }
3339 
3340         collector.expectEquals("JPEG thumbnail size result and request should match",
3341                 expectedThumbnailSize, resultThumbnailSize);
3342         if (collector.expectKeyValueNotNull(captureResult, CaptureResult.JPEG_GPS_LOCATION) !=
3343                 null) {
3344             collector.expectTrue("GPS location result and request should match.",
3345                     areGpsFieldsEqual(expectedExifData.gpsLocation,
3346                     captureResult.get(CaptureResult.JPEG_GPS_LOCATION)));
3347         }
3348         collector.expectEquals("JPEG orientation result and request should match",
3349                 expectedExifData.jpegOrientation,
3350                 captureResult.get(CaptureResult.JPEG_ORIENTATION));
3351         collector.expectEquals("JPEG quality result and request should match",
3352                 expectedExifData.jpegQuality, captureResult.get(CaptureResult.JPEG_QUALITY));
3353         collector.expectEquals("JPEG thumbnail quality result and request should match",
3354                 expectedExifData.thumbnailQuality,
3355                 captureResult.get(CaptureResult.JPEG_THUMBNAIL_QUALITY));
3356 
3357         // Validate other exif tags for all non-legacy devices
3358         if (!staticInfo.isHardwareLevelLegacy()) {
3359             verifyJpegExifExtraTags(exif, expectedSize, captureResult, staticInfo, allStaticInfo,
3360                     collector, expectedExifData);
3361         }
3362     }
3363 
3364     public static Optional<Long> getSurfaceUsage(Surface s) {
3365         if (s == null || !s.isValid()) {
3366             Log.e(TAG, "Invalid Surface!");
3367             return Optional.empty();
3368         }
3369 
3370         long usage = 0;
3371         ImageWriter writer = ImageWriter.newInstance(s, /*maxImages*/1, ImageFormat.YUV_420_888);
3372         try {
3373             Image img = writer.dequeueInputImage();
3374             if (img != null) {
3375                 usage = img.getHardwareBuffer().getUsage();
3376                 img.close();
3377             } else {
3378                 Log.e(TAG, "Unable to dequeue ImageWriter buffer!");
3379                 return Optional.empty();
3380             }
3381         } finally {
3382             writer.close();
3383         }
3384 
3385         return Optional.of(usage);
3386     }
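
    // Illustrative sketch, not part of the original utilities: reading the usage flags of a
    // Surface via getSurfaceUsage(). A real test would typically mask the returned value against
    // specific HardwareBuffer.USAGE_* bits; here the flags are only logged.
    private static void exampleGetSurfaceUsage(Surface surface) {
        Optional<Long> usage = getSurfaceUsage(surface);
        if (usage.isPresent()) {
            Log.i(TAG, "Surface usage flags: 0x" + Long.toHexString(usage.get()));
        } else {
            Log.i(TAG, "Surface usage flags could not be queried");
        }
    }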
3387 
3388     /**
3389      * Get the degree of an EXIF orientation.
3390      */
3391     private static int getExifOrientationInDegree(int exifOrientation,
3392             CameraErrorCollector collector) {
3393         switch (exifOrientation) {
3394             case ExifInterface.ORIENTATION_NORMAL:
3395                 return 0;
3396             case ExifInterface.ORIENTATION_ROTATE_90:
3397                 return 90;
3398             case ExifInterface.ORIENTATION_ROTATE_180:
3399                 return 180;
3400             case ExifInterface.ORIENTATION_ROTATE_270:
3401                 return 270;
3402             default:
3403                 collector.addMessage("It is impossible to get non 0, 90, 180, 270 degrees exif " +
3404                         "info based on the request orientation range");
3405                 return 0;
3406         }
3407     }
3408 
3409     /**
3410      * Get all of the supported focal lengths for capture result.
3411      *
3412      * If the camera is a logical camera, return the focal lengths of the logical camera
3413      * and its active physical camera.
3414      *
3415      * If the camera isn't a logical camera, return the focal lengths supported by the
3416      * single camera.
3417      */
3418     public static Set<Float> getAvailableFocalLengthsForResult(CaptureResult result,
3419             StaticMetadata staticInfo,
3420             HashMap<String, StaticMetadata> allStaticInfo) {
3421         Set<Float> focalLengths = new HashSet<Float>();
3422         float[] supportedFocalLengths = staticInfo.getAvailableFocalLengthsChecked();
3423         for (float focalLength : supportedFocalLengths) {
3424             focalLengths.add(focalLength);
3425         }
3426 
3427         if (staticInfo.isLogicalMultiCamera()) {
3428             boolean activePhysicalCameraIdSupported =
3429                     staticInfo.isActivePhysicalCameraIdSupported();
3430             Set<String> physicalCameraIds;
3431             if (activePhysicalCameraIdSupported) {
3432                 String activePhysicalCameraId = result.get(
3433                         CaptureResult.LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID);
3434                 physicalCameraIds = new HashSet<String>();
3435                 physicalCameraIds.add(activePhysicalCameraId);
3436             } else {
3437                 physicalCameraIds = staticInfo.getCharacteristics().getPhysicalCameraIds();
3438             }
3439 
3440             for (String physicalCameraId : physicalCameraIds) {
3441                 StaticMetadata physicalStaticInfo = allStaticInfo.get(physicalCameraId);
3442                 if (physicalStaticInfo != null) {
3443                     float[] focalLengthsArray =
3444                             physicalStaticInfo.getAvailableFocalLengthsChecked();
3445                     for (float focalLength: focalLengthsArray) {
3446                         focalLengths.add(focalLength);
3447                     }
3448                 }
3449             }
3450         }
3451 
3452         return focalLengths;
3453     }
3454 
3455     /**
3456      * Validate and return the focal length.
3457      *
3458      * @param result Capture result to get the focal length
3459      * @param supportedFocalLengths Valid focal lengths to check the result focal length against
3460      * @param collector The camera error collector
3461      * @return Focal length from capture result or -1 if focal length is not available.
3462      */
3463     private static float validateFocalLength(CaptureResult result,
3464             Set<Float> supportedFocalLengths, CameraErrorCollector collector) {
3465         Float resultFocalLength = result.get(CaptureResult.LENS_FOCAL_LENGTH);
3466         if (collector.expectTrue("Focal length is invalid",
3467                 resultFocalLength != null && resultFocalLength > 0)) {
3468             collector.expectTrue("Focal length should be one of the available focal length",
3469                     supportedFocalLengths.contains(resultFocalLength));
3470             return resultFocalLength;
3471         }
3472         return -1;
3473     }
3474 
3475     /**
3476      * Get all of the supported apertures for capture result.
3477      *
3478      * If the camera is a logical camera, return the apertures of the logical camera
3479      * and its active physical camera.
3480      *
3481      * If the camera isn't a logical camera, return the apertures supported by the
3482      * single camera.
3483      */
3484     private static Set<Float> getAvailableAperturesForResult(CaptureResult result,
3485             StaticMetadata staticInfo, HashMap<String, StaticMetadata> allStaticInfo) {
3486         Set<Float> allApertures = new HashSet<Float>();
3487         float[] supportedApertures = staticInfo.getAvailableAperturesChecked();
3488         for (float aperture : supportedApertures) {
3489             allApertures.add(aperture);
3490         }
3491 
3492         if (staticInfo.isLogicalMultiCamera()) {
3493             boolean activePhysicalCameraIdSupported =
3494                     staticInfo.isActivePhysicalCameraIdSupported();
3495             Set<String> physicalCameraIds;
3496             if (activePhysicalCameraIdSupported) {
3497                 String activePhysicalCameraId = result.get(
3498                         CaptureResult.LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID);
3499                 physicalCameraIds = new HashSet<String>();
3500                 physicalCameraIds.add(activePhysicalCameraId);
3501             } else {
3502                 physicalCameraIds = staticInfo.getCharacteristics().getPhysicalCameraIds();
3503             }
3504 
3505             for (String physicalCameraId : physicalCameraIds) {
3506                 StaticMetadata physicalStaticInfo = allStaticInfo.get(physicalCameraId);
3507                 if (physicalStaticInfo != null) {
3508                     float[] apertures = physicalStaticInfo.getAvailableAperturesChecked();
3509                     for (float aperture: apertures) {
3510                         allApertures.add(aperture);
3511                     }
3512                 }
3513             }
3514         }
3515 
3516         return allApertures;
3517     }
3518 
3519     /**
3520      * Validate and return the aperture.
3521      *
3522      * @param result Capture result to get the aperture
3523      * @return Aperture from capture result or -1 if aperture is not available.
3524      */
3525     private static float validateAperture(CaptureResult result,
3526             Set<Float> supportedApertures, CameraErrorCollector collector) {
3527         Float resultAperture = result.get(CaptureResult.LENS_APERTURE);
3528         if (collector.expectTrue("Capture result aperture is invalid",
3529                 resultAperture != null && resultAperture > 0)) {
3530             collector.expectTrue("Aperture should be one of the available apertures",
3531                     supportedApertures.contains(resultAperture));
3532             return resultAperture;
3533         }
3534         return -1;
3535     }
3536 
3537     /**
3538      * Return the closest value in a Set of floats.
3539      */
3540     private static float getClosestValueInSet(Set<Float> values, float target) {
3541         float minDistance = Float.MAX_VALUE;
3542         float closestValue = -1.0f;
3543         for(float value : values) {
3544             float distance = Math.abs(value - target);
3545             if (minDistance > distance) {
3546                 minDistance = distance;
3547                 closestValue = value;
3548             }
3549         }
3550 
3551         return closestValue;
3552     }
3553 
3554     /**
3555      * Return whether two Locations' GPS fields are the same.
3556      */
3557     private static boolean areGpsFieldsEqual(Location a, Location b) {
3558         if (a == null || b == null) {
3559             return false;
3560         }
3561 
3562         return a.getTime() == b.getTime() && a.getLatitude() == b.getLatitude() &&
3563                 a.getLongitude() == b.getLongitude() && a.getAltitude() == b.getAltitude() &&
3564                 a.getProvider() == b.getProvider();
3565     }
3566 
3567     /**
3568      * Verify extra tags in JPEG EXIF
3569      */
3570     private static void verifyJpegExifExtraTags(ExifInterface exif, Size jpegSize,
3571             CaptureResult result, StaticMetadata staticInfo,
3572             HashMap<String, StaticMetadata> allStaticInfo,
3573             CameraErrorCollector collector, ExifTestData expectedExifData)
3574             throws ParseException {
3575         /**
3576          * TAG_IMAGE_WIDTH and TAG_IMAGE_LENGTH and TAG_ORIENTATION.
3577          * Orientation and exif width/height need to be tested carefully; there are two cases:
3578          *
3579          * 1. The device rotates the image buffer physically. Then the exif width/height may not
3580          * match the requested still capture size, and we need to swap them before checking.
3581          *
3582          * 2. The device uses the exif tag to record the image orientation and doesn't rotate
3583          * the jpeg image buffer itself. In this case, the exif width/height should always match
3584          * the requested still capture size, and the exif orientation should always match the
3585          * requested orientation.
3586          *
3587          */
3588         int exifWidth = exif.getAttributeInt(ExifInterface.TAG_IMAGE_WIDTH, /*defaultValue*/0);
3589         int exifHeight = exif.getAttributeInt(ExifInterface.TAG_IMAGE_LENGTH, /*defaultValue*/0);
3590         Size exifSize = new Size(exifWidth, exifHeight);
3591         // Orientation could be missing, which is ok; use -1 as the default to detect absence.
3592         int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
3593                 /*defaultValue*/-1);
3594         // Get requested orientation from result, because they should be same.
3595         if (collector.expectKeyValueNotNull(result, CaptureResult.JPEG_ORIENTATION) != null) {
3596             int requestedOrientation = result.get(CaptureResult.JPEG_ORIENTATION);
3597             final int ORIENTATION_MIN = ExifInterface.ORIENTATION_UNDEFINED;
3598             final int ORIENTATION_MAX = ExifInterface.ORIENTATION_ROTATE_270;
3599             boolean orientationValid = collector.expectTrue(String.format(
3600                     "Exif orientation must be in range of [%d, %d]",
3601                     ORIENTATION_MIN, ORIENTATION_MAX),
3602                     exifOrientation >= ORIENTATION_MIN && exifOrientation <= ORIENTATION_MAX);
3603             if (orientationValid) {
3604                 /**
3605                  * The captured image doesn't respect the requested orientation, which
3606                  * means the device rotated the image buffer physically. Then we
3607                  * should swap the exif width/height accordingly before comparing.
3608                  */
3609                 boolean deviceRotatedImage = exifOrientation == ExifInterface.ORIENTATION_UNDEFINED;
3610 
3611                 if (deviceRotatedImage) {
3612                     // Case 1.
3613                     boolean needSwap = (requestedOrientation % 180 == 90);
3614                     if (needSwap) {
3615                         exifSize = new Size(exifHeight, exifWidth);
3616                     }
3617                 } else {
3618                     // Case 2.
3619                     collector.expectEquals("Exif orientation should match requested orientation",
3620                             requestedOrientation, getExifOrientationInDegree(exifOrientation,
3621                             collector));
3622                 }
3623             }
3624         }
3625 
3626         /**
3627          * Ideally, we would check exifSize == jpegSize == actual buffer size. But
3628          * jpegSize == jpeg decode bounds size (from the jpeg frame
3629          * header, not exif) was already validated in ImageReaderTest, so there is no need to
3630          * validate it again here.
3631          */
3632         collector.expectEquals("Exif size should match jpeg capture size", jpegSize, exifSize);
3633 
3634         // TAG_DATETIME, it should be local time
3635         long currentTimeInMs = System.currentTimeMillis();
3636         long currentTimeInSecond = currentTimeInMs / 1000;
3637         Date date = new Date(currentTimeInMs);
3638         String localDatetime = new SimpleDateFormat("yyyy:MM:dd HH:").format(date);
3639         String dateTime = exif.getAttribute(ExifInterface.TAG_DATETIME);
3640         if (collector.expectTrue("Exif TAG_DATETIME shouldn't be null", dateTime != null)) {
3641             collector.expectTrue("Exif TAG_DATETIME is wrong",
3642                     dateTime.length() == EXIF_DATETIME_LENGTH);
3643             long exifTimeInSecond =
3644                     new SimpleDateFormat("yyyy:MM:dd HH:mm:ss").parse(dateTime).getTime() / 1000;
3645             long delta = currentTimeInSecond - exifTimeInSecond;
3646             collector.expectTrue("Capture time deviates too much from the current time",
3647                     Math.abs(delta) < EXIF_DATETIME_ERROR_MARGIN_SEC);
3648             // It should be local time.
3649             collector.expectTrue("Exif date time should be local time",
3650                     dateTime.startsWith(localDatetime));
3651         }
3652 
3653         boolean isExternalCamera = staticInfo.isExternalCamera();
3654         if (!isExternalCamera) {
3655             // TAG_FOCAL_LENGTH.
3656             Set<Float> focalLengths = getAvailableFocalLengthsForResult(
3657                     result, staticInfo, allStaticInfo);
3658             float exifFocalLength = (float)exif.getAttributeDouble(
3659                         ExifInterface.TAG_FOCAL_LENGTH, -1);
3660             collector.expectEquals("Focal length should match",
3661                     getClosestValueInSet(focalLengths, exifFocalLength),
3662                     exifFocalLength, EXIF_FOCAL_LENGTH_ERROR_MARGIN);
3663             // More checks for focal length.
3664             collector.expectEquals("Exif focal length should match capture result",
3665                     validateFocalLength(result, focalLengths, collector),
3666                     exifFocalLength, EXIF_FOCAL_LENGTH_ERROR_MARGIN);
3667 
3668             // TAG_EXPOSURE_TIME
3669             // ExifInterface API gives exposure time value in the form of float instead of rational
3670             String exposureTime = exif.getAttribute(ExifInterface.TAG_EXPOSURE_TIME);
3671             collector.expectNotNull("Exif TAG_EXPOSURE_TIME shouldn't be null", exposureTime);
3672             if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_EXPOSURE_TIME)) {
3673                 if (exposureTime != null) {
3674                     double exposureTimeValue = Double.parseDouble(exposureTime);
3675                     long expTimeResult = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
3676                     double expected = expTimeResult / 1e9;
3677                     double tolerance = expected * EXIF_EXPOSURE_TIME_ERROR_MARGIN_RATIO;
3678                     tolerance = Math.max(tolerance, EXIF_EXPOSURE_TIME_MIN_ERROR_MARGIN_SEC);
3679                     collector.expectEquals("Exif exposure time doesn't match", expected,
3680                             exposureTimeValue, tolerance);
3681                 }
3682             }
3683 
3684             // TAG_APERTURE
3685             // ExifInterface API gives aperture value in the form of float instead of rational
3686             String exifAperture = exif.getAttribute(ExifInterface.TAG_APERTURE);
3687             collector.expectNotNull("Exif TAG_APERTURE shouldn't be null", exifAperture);
3688             if (staticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_AVAILABLE_APERTURES)) {
3689                 Set<Float> apertures = getAvailableAperturesForResult(
3690                         result, staticInfo, allStaticInfo);
3691                 if (exifAperture != null) {
3692                     float apertureValue = Float.parseFloat(exifAperture);
3693                     collector.expectEquals("Aperture value should match",
3694                             getClosestValueInSet(apertures, apertureValue),
3695                             apertureValue, EXIF_APERTURE_ERROR_MARGIN);
3696                     // More checks for aperture.
3697                     collector.expectEquals("Exif aperture length should match capture result",
3698                             validateAperture(result, apertures, collector),
3699                             apertureValue, EXIF_APERTURE_ERROR_MARGIN);
3700                 }
3701             }
3702 
3703             // TAG_MAKE
3704             String make = exif.getAttribute(ExifInterface.TAG_MAKE);
3705             collector.expectEquals("Exif TAG_MAKE is incorrect", Build.MANUFACTURER, make);
3706 
3707             // TAG_MODEL
3708             String model = exif.getAttribute(ExifInterface.TAG_MODEL);
3709             collector.expectEquals("Exif TAG_MODEL is incorrect", Build.MODEL, model);
3710 
3711 
3712             // TAG_ISO
3713             int iso = exif.getAttributeInt(ExifInterface.TAG_ISO, /*defaultValue*/-1);
3714             if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_SENSITIVITY) ||
3715                     staticInfo.areKeysAvailable(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
3716                 int expectedIso = 100;
3717                 if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_SENSITIVITY)) {
3718                     expectedIso = result.get(CaptureResult.SENSOR_SENSITIVITY);
3719                 }
3720                 if (staticInfo.areKeysAvailable(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
3721                     expectedIso = expectedIso *
3722                             result.get(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST);
3723                 } else {
3724                     expectedIso *= 100;
3725                 }
3726                 collector.expectInRange("Exif TAG_ISO is incorrect", iso,
3727                         expectedIso/100,((expectedIso + 50)/100) + MAX_ISO_MISMATCH);
3728             }
3729         } else {
3730             // External camera specific checks
3731             // TAG_MAKE
3732             String make = exif.getAttribute(ExifInterface.TAG_MAKE);
3733             collector.expectNotNull("Exif TAG_MAKE is null", make);
3734 
3735             // TAG_MODEL
3736             String model = exif.getAttribute(ExifInterface.TAG_MODEL);
3737             collector.expectNotNull("Exif TAG_MODEL is null", model);
3738         }
3739 
3740 
3741         /**
3742          * TAG_FLASH. TODO: For full devices, can check a lot more info
3743          * (http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/EXIF.html#Flash)
3744          */
3745         String flash = exif.getAttribute(ExifInterface.TAG_FLASH);
3746         collector.expectNotNull("Exif TAG_FLASH shouldn't be null", flash);
3747 
3748         /**
3749          * TAG_WHITE_BALANCE. TODO: For full devices, with the DNG tags, we
3750          * should be able to cross-check android.sensor.referenceIlluminant.
3751          */
3752         String whiteBalance = exif.getAttribute(ExifInterface.TAG_WHITE_BALANCE);
3753         collector.expectNotNull("Exif TAG_WHITE_BALANCE shouldn't be null", whiteBalance);
3754 
3755         // TAG_DATETIME_DIGITIZED (a.k.a Create time for digital cameras).
3756         String digitizedTime = exif.getAttribute(ExifInterface.TAG_DATETIME_DIGITIZED);
3757         collector.expectNotNull("Exif TAG_DATETIME_DIGITIZED shouldn't be null", digitizedTime);
3758         if (digitizedTime != null) {
3759             String expectedDateTime = exif.getAttribute(ExifInterface.TAG_DATETIME);
3760             collector.expectNotNull("Exif TAG_DATETIME shouldn't be null", expectedDateTime);
3761             if (expectedDateTime != null) {
3762                 collector.expectEquals("dateTime should match digitizedTime",
3763                         expectedDateTime, digitizedTime);
3764             }
3765         }
3766 
3767         /**
3768          * TAG_SUBSEC_TIME. Since the sub second tag strings are truncated to at
3769          * most 9 digits in ExifInterface implementation, use getAttributeInt to
3770          * sanitize it. When the default value -1 is returned, it means that
3771          * this exif tag either doesn't exist or is a non-numerical invalid
3772          * string. Same rule applies to the rest of sub second tags.
3773          */
3774         int subSecTime = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME, /*defaultValue*/-1);
3775         collector.expectTrue("Exif TAG_SUBSEC_TIME value is null or invalid!", subSecTime >= 0);
3776 
3777         // TAG_SUBSEC_TIME_ORIG
3778         int subSecTimeOrig = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME_ORIG,
3779                 /*defaultValue*/-1);
3780         collector.expectTrue("Exif TAG_SUBSEC_TIME_ORIG value is null or invalid!",
3781                 subSecTimeOrig >= 0);
3782 
3783         // TAG_SUBSEC_TIME_DIG
3784         int subSecTimeDig = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME_DIG,
3785                 /*defaultValue*/-1);
3786         collector.expectTrue(
3787                 "Exif TAG_SUBSEC_TIME_DIG value is null or invalid!", subSecTimeDig >= 0);
3788 
3789         /**
3790          * TAG_GPS_DATESTAMP & TAG_GPS_TIMESTAMP.
3791          * The GPS timestamp information should be in seconds UTC time.
3792          */
3793         String gpsDatestamp = exif.getAttribute(ExifInterface.TAG_GPS_DATESTAMP);
3794         collector.expectNotNull("Exif TAG_GPS_DATESTAMP shouldn't be null", gpsDatestamp);
3795         String gpsTimestamp = exif.getAttribute(ExifInterface.TAG_GPS_TIMESTAMP);
3796         collector.expectNotNull("Exif TAG_GPS_TIMESTAMP shouldn't be null", gpsTimestamp);
3797 
3798         SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy:MM:dd hh:mm:ss z");
3799         String gpsExifTimeString = gpsDatestamp + " " + gpsTimestamp + " UTC";
3800         Date gpsDateTime = dateFormat.parse(gpsExifTimeString);
3801         Date expected = new Date(expectedExifData.gpsLocation.getTime());
3802         collector.expectEquals("Jpeg EXIF GPS time should match", expected, gpsDateTime);
3803     }
3804 
3805 
3806     /**
3807      * Immutable class wrapping the exif test data.
3808      */
3809     public static class ExifTestData {
3810         public final Location gpsLocation;
3811         public final int jpegOrientation;
3812         public final byte jpegQuality;
3813         public final byte thumbnailQuality;
3814 
3815         public ExifTestData(Location location, int orientation,
3816                 byte jpgQuality, byte thumbQuality) {
3817             gpsLocation = location;
3818             jpegOrientation = orientation;
3819             jpegQuality = jpgQuality;
3820             thumbnailQuality = thumbQuality;
3821         }
3822     }
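
    // Illustrative sketch, not part of the original utilities: constructing an ExifTestData
    // instance for use with setJpegKeys()/verifyJpegKeys(). The coordinates, orientation and
    // quality values are arbitrary placeholders.
    private static ExifTestData exampleExifTestData() {
        Location location = new Location(LocationManager.GPS_PROVIDER);
        location.setTime(System.currentTimeMillis());
        location.setLatitude(40.689486);
        location.setLongitude(-74.044502);
        location.setAltitude(10.0);
        return new ExifTestData(location, /*orientation*/90,
                /*jpgQuality*/(byte) 85, /*thumbQuality*/(byte) 75);
    }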
3823 
3824     public static Size getPreviewSizeBound(WindowManager windowManager, Size bound) {
3825         WindowMetrics windowMetrics = windowManager.getCurrentWindowMetrics();
3826         Rect windowBounds = windowMetrics.getBounds();
3827 
3828         int windowHeight = windowBounds.height();
3829         int windowWidth = windowBounds.width();
3830 
3831         if (windowHeight > windowWidth) {
3832             windowHeight = windowWidth;
3833             windowWidth = windowBounds.height();
3834         }
3835 
3836         if (bound.getWidth() <= windowWidth
3837                 && bound.getHeight() <= windowHeight) {
3838             return bound;
3839         } else {
3840             return new Size(windowWidth, windowHeight);
3841         }
3842     }
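
    // Illustrative sketch, not part of the original utilities: clamping a preview size candidate
    // to the current window bounds. The 1920x1080 upper bound is a placeholder value.
    private static Size examplePreviewSizeBoundUsage(WindowManager windowManager) {
        Size maxPreviewSize = new Size(1920, 1080);
        Size bound = getPreviewSizeBound(windowManager, maxPreviewSize);
        Log.i(TAG, "Preview sizes will be capped at " + bound);
        return bound;
    }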
3843 
3844     /**
3845      * Check if a particular stream configuration is supported by attempting to configure it
3846      * on the device.
3847      */
3848     public static boolean isStreamConfigurationSupported(CameraDevice camera,
3849             List<Surface> outputSurfaces,
3850             CameraCaptureSession.StateCallback listener, Handler handler) {
3851         try {
3852             configureCameraSession(camera, outputSurfaces, listener, handler);
3853             return true;
3854         } catch (Exception e) {
3855             Log.i(TAG, "This stream configuration is not supported due to " + e.getMessage());
3856             return false;
3857         }
3858     }
3859 
3860     public final static class SessionConfigSupport {
3861         public final boolean error;
3862         public final boolean callSupported;
3863         public final boolean configSupported;
3864 
3865         public SessionConfigSupport(boolean error,
3866                 boolean callSupported, boolean configSupported) {
3867             this.error = error;
3868             this.callSupported = callSupported;
3869             this.configSupported = configSupported;
3870         }
3871     }
3872 
3873     /**
3874      * Check that a particular stream combination is supported, failing the test if it is not.
3875      */
3876     public static void checkSessionConfigurationWithSurfaces(CameraDevice camera,
3877             Handler handler, List<Surface> outputSurfaces, InputConfiguration inputConfig,
3878             int operatingMode, boolean defaultSupport, String msg) {
3879         List<OutputConfiguration> outConfigurations = new ArrayList<>(outputSurfaces.size());
3880         for (Surface surface : outputSurfaces) {
3881             outConfigurations.add(new OutputConfiguration(surface));
3882         }
3883 
3884         checkSessionConfigurationSupported(camera, handler, outConfigurations,
3885                 inputConfig, operatingMode, defaultSupport, msg);
3886     }
3887 
3888     public static void checkSessionConfigurationSupported(CameraDevice camera,
3889             Handler handler, List<OutputConfiguration> outputConfigs,
3890             InputConfiguration inputConfig, int operatingMode, boolean defaultSupport,
3891             String msg) {
3892         SessionConfigSupport sessionConfigSupported =
3893                 isSessionConfigSupported(camera, handler, outputConfigs, inputConfig,
3894                 operatingMode, defaultSupport);
3895 
3896         assertTrue(msg, !sessionConfigSupported.error && sessionConfigSupported.configSupported);
3897     }
3898 
3899     /**
3900      * Query whether a particular stream combination is supported.
3901      */
3902     public static SessionConfigSupport isSessionConfigSupported(CameraDevice camera,
3903             Handler handler, List<OutputConfiguration> outputConfigs,
3904             InputConfiguration inputConfig, int operatingMode, boolean defaultSupport) {
3905         boolean ret;
3906         BlockingSessionCallback sessionListener = new BlockingSessionCallback();
3907 
3908         SessionConfiguration sessionConfig = new SessionConfiguration(operatingMode, outputConfigs,
3909                 new HandlerExecutor(handler), sessionListener);
3910         if (inputConfig != null) {
3911             sessionConfig.setInputConfiguration(inputConfig);
3912         }
3913 
3914         try {
3915             ret = camera.isSessionConfigurationSupported(sessionConfig);
3916         } catch (UnsupportedOperationException e) {
3917             // Camera doesn't support session configuration query
3918             return new SessionConfigSupport(false/*error*/,
3919                     false/*callSupported*/, defaultSupport/*configSupported*/);
3920         } catch (IllegalArgumentException e) {
3921             return new SessionConfigSupport(true/*error*/,
3922                     false/*callSupported*/, false/*configSupported*/);
3923         } catch (android.hardware.camera2.CameraAccessException e) {
3924             return new SessionConfigSupport(true/*error*/,
3925                     false/*callSupported*/, false/*configSupported*/);
3926         }
3927 
3928         return new SessionConfigSupport(false/*error*/,
3929                 true/*callSupported*/, ret/*configSupported*/);
3930     }
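
    // Illustrative sketch, not part of the original utilities: querying whether a single-surface
    // session configuration is supported before attempting to configure it. SESSION_REGULAR is
    // used as the operating mode and the preview surface is assumed to be set up by the caller.
    private static boolean exampleIsSessionConfigSupported(CameraDevice camera, Handler handler,
            Surface previewSurface) {
        List<OutputConfiguration> outputs = new ArrayList<>();
        outputs.add(new OutputConfiguration(previewSurface));
        SessionConfigSupport support = isSessionConfigSupported(camera, handler, outputs,
                /*inputConfig*/null, SessionConfiguration.SESSION_REGULAR, /*defaultSupport*/true);
        return !support.error && support.configSupported;
    }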
3931 
3932     /**
3933      * Wait for numResultsWait frames
3934      *
3935      * @param resultListener The capture listener to get capture result back.
3936      * @param numResultsWait Number of frames to wait
3937      * @param timeout Wait timeout in ms.
3938      *
3939      * @return the last result, or {@code null} if there was none
3940      */
3941     public static CaptureResult waitForNumResults(SimpleCaptureCallback resultListener,
3942             int numResultsWait, int timeout) {
3943         if (numResultsWait < 0 || resultListener == null) {
3944             throw new IllegalArgumentException(
3945                     "Input must be non-negative number and listener must be non-null");
3946         }
3947 
3948         CaptureResult result = null;
3949         for (int i = 0; i < numResultsWait; i++) {
3950             result = resultListener.getCaptureResult(timeout);
3951         }
3952 
3953         return result;
3954     }
3955 
3956     /**
3957      * Wait for any expected result key values available in a certain number of results.
3958      *
3959      * <p>
3960      * Check the result immediately if numResultsWait is 0.
3961      * </p>
3962      *
3963      * @param listener The capture listener to get capture result.
3964      * @param resultKey The capture result key associated with the result value.
3965      * @param expectedValues The list of result values to wait for;
3966      * return immediately if the list is empty.
3967      * @param numResultsWait Number of frames to wait before timing out.
3968      * @param timeout Result wait timeout in ms.
3969      * @throws TimeoutRuntimeException If more than numResultsWait results are
3970      * seen before a matching result value arrives, or each individual wait
3971      * for a result times out after 'timeout' ms.
3972      */
3973     public static <T> void waitForAnyResultValue(SimpleCaptureCallback listener,
3974             CaptureResult.Key<T> resultKey, List<T> expectedValues, int numResultsWait,
3975             int timeout) {
3976         if (numResultsWait < 0 || listener == null || expectedValues == null) {
3977             throw new IllegalArgumentException(
3978                     "Input must be non-negative number and listener/expectedValues "
3979                     + "must be non-null");
3980         }
3981 
3982         int i = 0;
3983         CaptureResult result;
3984         do {
3985             result = listener.getCaptureResult(timeout);
3986             T value = result.get(resultKey);
3987             for ( T expectedValue : expectedValues) {
3988                 if (VERBOSE) {
3989                     Log.v(TAG, "Current result value for key " + resultKey.getName() + " is: "
3990                             + value.toString());
3991                 }
3992                 if (value.equals(expectedValue)) {
3993                     return;
3994                 }
3995             }
3996         } while (i++ < numResultsWait);
3997 
3998         throw new TimeoutRuntimeException(
3999                 "Unable to get the expected result value " + expectedValues + " for key " +
4000                         resultKey.getName() + " after waiting for " + numResultsWait + " results");
4001     }
4002 
4003     /**
4004      * Wait for expected result key value available in a certain number of results.
4005      *
4006      * <p>
4007      * Check the result immediately if numResultsWait is 0.
4008      * </p>
4009      *
4010      * @param listener The capture listener to get capture result
4011      * @param resultKey The capture result key associated with the result value
4012      * @param expectedValue The result value to wait for
4013      * @param numResultsWait Number of frames to wait before timing out
4014      * @param timeout Wait timeout in ms.
4015      * @throws TimeoutRuntimeException If more than numResultsWait results are
4016      * seen before a matching result value arrives, or each individual wait
4017      * for a result times out after 'timeout' ms.
4018      */
4019     public static <T> void waitForResultValue(SimpleCaptureCallback listener,
4020             CaptureResult.Key<T> resultKey, T expectedValue, int numResultsWait, int timeout) {
4021         List<T> expectedValues = new ArrayList<T>();
4022         expectedValues.add(expectedValue);
4023         waitForAnyResultValue(listener, resultKey, expectedValues, numResultsWait, timeout);
4024     }
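
    // Illustrative sketch, not part of the original utilities: waiting for the auto-focus state
    // machine to report FOCUSED_LOCKED on a repeating request. The frame count and timeout are
    // placeholder values.
    private static void exampleWaitForAfLocked(SimpleCaptureCallback resultListener) {
        waitForResultValue(resultListener, CaptureResult.CONTROL_AF_STATE,
                CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED,
                /*numResultsWait*/30, /*timeout*/3000);
    }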
4025 
4026     /**
4027      * Wait for AE to be stabilized before capture: CONVERGED or FLASH_REQUIRED.
4028      *
4029      * <p>Waits for {@code android.sync.maxLatency} number of results first, to make sure
4030      * that the result is synchronized (or {@code numResultWaitForUnknownLatency} if the latency
4031      * is unknown).</p>
4032      *
4033      * <p>This is a no-op for {@code LEGACY} devices since they don't report
4034      * the {@code aeState} result.</p>
4035      *
4036      * @param resultListener The capture listener to get capture result back.
4037      * @param numResultWaitForUnknownLatency Number of frames to wait if the camera device latency is
4038      *                                       unknown.
4039      * @param staticInfo corresponding camera device static metadata.
4040      * @param settingsTimeout wait timeout for settings application in ms.
4041      * @param numResultWait Number of frames to wait before timing out.
4043      */
4044     public static void waitForAeStable(SimpleCaptureCallback resultListener,
4045             int numResultWaitForUnknownLatency, StaticMetadata staticInfo,
4046             int settingsTimeout, int numResultWait) {
4047         waitForSettingsApplied(resultListener, numResultWaitForUnknownLatency, staticInfo,
4048                 settingsTimeout);
4049 
4050         if (!staticInfo.isHardwareLevelAtLeastLimited()) {
4051             // No-op for devices that don't report aeState (e.g. LEGACY)
4052             return;
4053         }
4054         List<Integer> expectedAeStates = new ArrayList<Integer>();
4055         expectedAeStates.add(Integer.valueOf(CaptureResult.CONTROL_AE_STATE_CONVERGED));
4056         expectedAeStates.add(Integer.valueOf(CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED));
4057         waitForAnyResultValue(resultListener, CaptureResult.CONTROL_AE_STATE, expectedAeStates,
4058                 numResultWait, settingsTimeout);
4059     }
4060 
4061     /**
4062      * Wait for enough results for settings to be applied
4063      *
4064      * @param resultListener The capture listener to get capture result back.
4065      * @param numResultWaitForUnknownLatency Number of frames to wait if the camera device latency is
4066      *                                       unknown.
4067      * @param staticInfo corresponding camera device static metadata.
4068      * @param timeout wait timeout in ms.
4069      */
4070     public static void waitForSettingsApplied(SimpleCaptureCallback resultListener,
4071             int numResultWaitForUnknownLatency, StaticMetadata staticInfo, int timeout) {
4072         int maxLatency = staticInfo.getSyncMaxLatency();
4073         if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) {
4074             maxLatency = numResultWaitForUnknownLatency;
4075         }
4076         // Wait for settings to take effect
4077         waitForNumResults(resultListener, maxLatency, timeout);
4078     }
4079 
4080     public static Range<Integer> getSuitableFpsRangeForDuration(String cameraId,
4081             long frameDuration, StaticMetadata staticInfo) {
4082         // Add 0.05 here so an Fps like 29.99 evaluates to 30
4083         int minBurstFps = (int) Math.floor(1e9 / frameDuration + 0.05f);
4084         boolean foundConstantMaxYUVRange = false;
4085         boolean foundYUVStreamingRange = false;
4086         boolean isExternalCamera = staticInfo.isExternalCamera();
4087         boolean isNIR = staticInfo.isNIRColorFilter();
4088 
4089         // Find suitable target FPS range - as high as possible that covers the max YUV rate
4090         // Also verify that there's a good preview rate as well
4091         List<Range<Integer> > fpsRanges = Arrays.asList(
4092                 staticInfo.getAeAvailableTargetFpsRangesChecked());
4093         Range<Integer> targetRange = null;
4094         for (Range<Integer> fpsRange : fpsRanges) {
4095             if (fpsRange.getLower() == minBurstFps && fpsRange.getUpper() == minBurstFps) {
4096                 foundConstantMaxYUVRange = true;
4097                 targetRange = fpsRange;
4098             } else if (isExternalCamera && fpsRange.getUpper() == minBurstFps) {
4099                 targetRange = fpsRange;
4100             }
4101             if (fpsRange.getLower() <= 15 && fpsRange.getUpper() == minBurstFps) {
4102                 foundYUVStreamingRange = true;
4103             }
4104 
4105         }
4106 
4107         if (!isExternalCamera) {
4108             assertTrue(String.format("Cam %s: Target FPS range of (%d, %d) must be supported",
4109                     cameraId, minBurstFps, minBurstFps), foundConstantMaxYUVRange);
4110         }
4111 
4112         if (!isNIR) {
4113             assertTrue(String.format(
4114                     "Cam %s: Target FPS range of (x, %d) where x <= 15 must be supported",
4115                     cameraId, minBurstFps), foundYUVStreamingRange);
4116         }
4117         return targetRange;
4118     }
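
    // Illustrative sketch, not part of the original utilities: picking an FPS range that covers a
    // ~30fps frame duration and applying it to a preview request. The frame duration value is a
    // placeholder.
    private static void exampleApplySuitableFpsRange(String cameraId, StaticMetadata staticInfo,
            CaptureRequest.Builder previewBuilder) {
        long frameDurationNs = 33333333L; // ~30fps
        Range<Integer> fpsRange =
                getSuitableFpsRangeForDuration(cameraId, frameDurationNs, staticInfo);
        previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
    }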
4119     /**
4120      * Get the candidate supported zoom ratios for testing
4121      *
4122      * <p>
4123      * This function returns the boundary values of the supported zoom ratio range, in addition
4124      * to the 1.0x zoom ratio.
4125      * </p>
4126      */
4127     public static List<Float> getCandidateZoomRatios(StaticMetadata staticInfo) {
4128         List<Float> zoomRatios = new ArrayList<Float>();
4129         Range<Float> zoomRatioRange = staticInfo.getZoomRatioRangeChecked();
4130         zoomRatios.add(zoomRatioRange.getLower());
4131         if (zoomRatioRange.contains(1.0f) &&
4132                 1.0f - zoomRatioRange.getLower() > ZOOM_RATIO_THRESHOLD &&
4133                 zoomRatioRange.getUpper() - 1.0f > ZOOM_RATIO_THRESHOLD) {
4134             zoomRatios.add(1.0f);
4135         }
4136         zoomRatios.add(zoomRatioRange.getUpper());
4137 
4138         return zoomRatios;
4139     }
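
    // Illustrative sketch, not part of the original utilities: exercising the boundary zoom
    // ratios returned by getCandidateZoomRatios() on a request builder.
    private static void exampleApplyCandidateZoomRatios(StaticMetadata staticInfo,
            CaptureRequest.Builder builder) {
        for (Float zoomRatio : getCandidateZoomRatios(staticInfo)) {
            builder.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomRatio);
            // A test would typically submit a capture per ratio and verify the result here.
        }
    }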
4140 
4141     /**
4142      * Get the primary rear facing camera from an ID list
4143      */
4144     public static String getPrimaryRearCamera(CameraManager manager, String[] cameraIds)
4145             throws Exception {
4146         return getPrimaryCamera(manager, cameraIds, CameraCharacteristics.LENS_FACING_BACK);
4147     }
4148 
4149     /**
4150      * Get the primary front facing camera from an ID list
4151      */
4152     public static String getPrimaryFrontCamera(CameraManager manager, String[] cameraIds)
4153             throws Exception {
4154         return getPrimaryCamera(manager, cameraIds, CameraCharacteristics.LENS_FACING_FRONT);
4155     }
4156 
4157     private static String getPrimaryCamera(CameraManager manager,
4158             String[] cameraIds, Integer facing) throws Exception {
4159         if (cameraIds == null) {
4160             return null;
4161         }
4162 
4163         for (String id : cameraIds) {
4164             if (isPrimaryCamera(manager, id, facing)) {
4165                 return id;
4166             }
4167         }
4168 
4169         return null;
4170     }
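
    // Illustrative sketch, not part of the original utilities: locating the primary rear-facing
    // camera among the IDs under test and skipping gracefully when there is none.
    private static void examplePrimaryRearCameraLookup(CameraManager manager, String[] cameraIds)
            throws Exception {
        String primaryRearId = getPrimaryRearCamera(manager, cameraIds);
        if (primaryRearId == null) {
            Log.i(TAG, "No primary rear-facing camera found, skipping");
            return;
        }
        Log.i(TAG, "Primary rear-facing camera: " + primaryRearId);
    }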
4171 
4172     /**
4173      * Check whether a camera Id is a primary rear facing camera
4174      */
4175     public static boolean isPrimaryRearFacingCamera(CameraManager manager, String cameraId)
4176             throws Exception {
4177         return isPrimaryCamera(manager, cameraId, CameraCharacteristics.LENS_FACING_BACK);
4178     }
4179 
4180     /**
4181      * Check whether a camera Id is a primary front facing camera
4182      */
4183     public static boolean isPrimaryFrontFacingCamera(CameraManager manager, String cameraId)
4184             throws Exception {
4185         return isPrimaryCamera(manager, cameraId, CameraCharacteristics.LENS_FACING_FRONT);
4186     }
4187 
4188     private static boolean isPrimaryCamera(CameraManager manager, String cameraId,
4189             Integer lensFacing) throws Exception {
4190         CameraCharacteristics characteristics;
4191         Integer facing;
4192 
4193         String [] ids = manager.getCameraIdList();
4194         for (String id : ids) {
4195             characteristics = manager.getCameraCharacteristics(id);
4196             facing = characteristics.get(CameraCharacteristics.LENS_FACING);
4197             if (lensFacing.equals(facing)) {
4198                 if (cameraId.equals(id)) {
4199                     return true;
4200                 } else {
4201                     return false;
4202                 }
4203             }
4204         }
4205         return false;
4206     }
4207 
4208     /**
4209      * Verifies the camera in this listener was opened and then unconfigured exactly once.
4210      *
4211      * <p>This assumes that no other action to the camera has been done (e.g.
4212      * it hasn't been configured, or closed, or disconnected). Verification is
4213      * performed immediately without any timeouts.</p>
4214      *
4215      * <p>This checks that the state has previously changed first for opened and then unconfigured.
4216      * Any other state transitions will fail. A test failure is thrown if verification fails.</p>
4217      *
4218      * @param cameraId Camera identifier
4219      * @param listener Listener which was passed to {@link CameraManager#openCamera}
4220      *
4221      * @return The camera device (non-{@code null}).
4222      */
4223     public static CameraDevice verifyCameraStateOpened(String cameraId,
4224             MockStateCallback listener) {
4225         ArgumentCaptor<CameraDevice> argument =
4226                 ArgumentCaptor.forClass(CameraDevice.class);
4227         InOrder inOrder = inOrder(listener);
4228 
4229         /**
4230          * State transitions (in that order):
4231          *  1) onOpened
4232          *
4233          * No other transitions must occur for successful #openCamera
4234          */
4235         inOrder.verify(listener)
4236                 .onOpened(argument.capture());
4237 
4238         CameraDevice camera = argument.getValue();
4239         assertNotNull(
4240                 String.format("Failed to open camera device ID: %s", cameraId),
4241                 camera);
4242 
4243         // Do not use inOrder here since that would skip anything called before onOpened
4244         verifyNoMoreInteractions(listener);
4245 
4246         return camera;
4247     }
4248 
4249     public static void verifySingleAvailabilityCbsReceived(
4250             LinkedBlockingQueue<String> expectedEventQueue,
4251             LinkedBlockingQueue<String> unExpectedEventQueue, String expectedId,
4252             String expectedStr, String unExpectedStr) throws Exception {
4253         String candidateId = expectedEventQueue.poll(AVAILABILITY_TIMEOUT_MS,
4254                 java.util.concurrent.TimeUnit.MILLISECONDS);
4255         assertNotNull("No " + expectedStr + " notice for expected ID " + expectedId, candidateId);
4256         assertTrue("Received " + expectedStr + " notice for wrong ID, " + "expected "
4257                 + expectedId + ", got " + candidateId, expectedId.equals(candidateId));
4258         assertTrue("Received > 1 " + expectedStr + " callback for id " + expectedId,
4259                 expectedEventQueue.size() == 0);
4260         assertTrue(unExpectedStr + " events received unexpectedly",
4261                 unExpectedEventQueue.size() == 0);
4262     }
4263 
4264     public static <T> void verifyAvailabilityCbsReceived(HashSet<T> expectedCameras,
4265             LinkedBlockingQueue<T> expectedEventQueue, LinkedBlockingQueue<T> unExpectedEventQueue,
4266             boolean available) throws Exception {
4267         while (expectedCameras.size() > 0) {
4268             T id = expectedEventQueue.poll(AVAILABILITY_TIMEOUT_MS,
4269                     java.util.concurrent.TimeUnit.MILLISECONDS);
4270             assertTrue("Did not receive initial " + (available ? "available" : "unavailable")
4271                     + " notices for some cameras", id != null);
4272             assertTrue("Received initial " + (available ? "available" : "unavailable")
4273                     + " notice for wrong camera " + id, expectedCameras.contains(id));
4274             expectedCameras.remove(id);
4275         }
4276         // Verify no unexpected unavailable/available cameras were reported
4277         if (unExpectedEventQueue != null) {
4278             assertTrue("Received unexpected initial "
4279                     + (available ? "unavailable" : "available"),
4280                     unExpectedEventQueue.size() == 0);
4281         }
4282     }
4283 
4284     /**
4285      * This function polls on the event queue to get unavailable physical camera IDs belonging
4286      * to a particular logical camera. The event queue is drained before the function returns.
4287      *
4288      * @param queue The event queue capturing unavailable physical cameras
4289      * @param cameraId The logical camera ID
4290      *
4291      * @return The currently unavailable physical cameras
4292      */
4293     private static Set<String> getUnavailablePhysicalCamerasAndDrain(
4294             LinkedBlockingQueue<Pair<String, String>> queue, String cameraId) throws Exception {
4295         Set<String> unavailablePhysicalCameras = new HashSet<String>();
4296 
4297         while (true) {
4298             Pair<String, String> unavailableIdCombo = queue.poll(
4299                     AVAILABILITY_TIMEOUT_MS, java.util.concurrent.TimeUnit.MILLISECONDS);
4300             if (unavailableIdCombo == null) {
4301                 // No more entries in the queue. Break out of the loop and return.
4302                 break;
4303             }
4304 
4305             if (cameraId.equals(unavailableIdCombo.first)) {
4306                 unavailablePhysicalCameras.add(unavailableIdCombo.second);
4307             }
4308         }
4309 
4310         return unavailablePhysicalCameras;
4311     }
4312 
4313     public static void testPhysicalCameraAvailabilityConsistencyHelper(
4314             String[] cameraIds, CameraManager manager,
4315             Handler handler, boolean expectInitialCallbackAfterOpen) throws Throwable {
4316         final LinkedBlockingQueue<String> availableEventQueue = new LinkedBlockingQueue<>();
4317         final LinkedBlockingQueue<String> unavailableEventQueue = new LinkedBlockingQueue<>();
4318         final LinkedBlockingQueue<Pair<String, String>> unavailablePhysicalCamEventQueue =
4319                 new LinkedBlockingQueue<>();
4320         CameraManager.AvailabilityCallback ac = new CameraManager.AvailabilityCallback() {
4321             @Override
4322             public void onCameraAvailable(String cameraId) {
4323                 super.onCameraAvailable(cameraId);
4324                 availableEventQueue.offer(cameraId);
4325             }
4326 
4327             @Override
4328             public void onCameraUnavailable(String cameraId) {
4329                 super.onCameraUnavailable(cameraId);
4330                 unavailableEventQueue.offer(cameraId);
4331             }
4332 
4333             @Override
4334             public void onPhysicalCameraAvailable(String cameraId, String physicalCameraId) {
4335                 super.onPhysicalCameraAvailable(cameraId, physicalCameraId);
4336                 unavailablePhysicalCamEventQueue.remove(new Pair<>(cameraId, physicalCameraId));
4337             }
4338 
4339             @Override
4340             public void onPhysicalCameraUnavailable(String cameraId, String physicalCameraId) {
4341                 super.onPhysicalCameraUnavailable(cameraId, physicalCameraId);
4342                 unavailablePhysicalCamEventQueue.offer(new Pair<>(cameraId, physicalCameraId));
4343             }
4344         };
4345 
4346         String[] cameras = cameraIds;
4347         if (cameras.length == 0) {
4348             Log.i(TAG, "Skipping testPhysicalCameraAvailabilityConsistency, no cameras");
4349             return;
4350         }
4351 
4352         for (String cameraId : cameras) {
4353             CameraCharacteristics ch = manager.getCameraCharacteristics(cameraId);
4354             StaticMetadata staticInfo = new StaticMetadata(ch);
4355             if (!staticInfo.isLogicalMultiCamera()) {
4356                 // Test is only applicable for logical multi-camera.
4357                 continue;
4358             }
4359 
4360             // Get initial physical unavailable callbacks without opening camera
4361             manager.registerAvailabilityCallback(ac, handler);
4362             Set<String> unavailablePhysicalCameras = getUnavailablePhysicalCamerasAndDrain(
4363                     unavailablePhysicalCamEventQueue, cameraId);
4364 
4365             // Open camera
4366             MockStateCallback mockListener = MockStateCallback.mock();
4367             BlockingStateCallback cameraListener = new BlockingStateCallback(mockListener);
4368             manager.openCamera(cameraId, cameraListener, handler);
4369             // Block until opened
4370             cameraListener.waitForState(BlockingStateCallback.STATE_OPENED,
4371                     CameraTestUtils.CAMERA_IDLE_TIMEOUT_MS);
4372             // Then verify only open happened, and get the camera handle
4373             CameraDevice camera = CameraTestUtils.verifyCameraStateOpened(cameraId, mockListener);
4374 
4375             // The camera should be in available->unavailable state.
4376             String candidateUnavailableId = unavailableEventQueue.poll(AVAILABILITY_TIMEOUT_MS,
4377                     java.util.concurrent.TimeUnit.MILLISECONDS);
4378             assertNotNull("No unavailable notice for expected ID " + cameraId,
4379                     candidateUnavailableId);
4380             assertTrue("Received unavailable notice for wrong ID, "
4381                     + "expected " + cameraId + ", got " + candidateUnavailableId,
4382                     cameraId.equals(candidateUnavailableId));
4383             assertTrue("Received >  1 unavailable callback for id " + cameraId,
4384                     unavailableEventQueue.size() == 0);
4385             availableEventQueue.clear();
4386             unavailableEventQueue.clear();
4387 
4388             manager.unregisterAvailabilityCallback(ac);
4389             // Get physical unavailable callbacks while camera is open
4390             manager.registerAvailabilityCallback(ac, handler);
4391             HashSet<String> expectedAvailableCameras = new HashSet<String>(Arrays.asList(cameras));
4392             expectedAvailableCameras.remove(cameraId);
4393             HashSet<String> expectedUnavailableCameras =
4394                     new HashSet<String>(Arrays.asList(cameraId));
4395             CameraTestUtils.verifyAvailabilityCbsReceived(expectedAvailableCameras,
4396                     availableEventQueue, null, /*available*/ true);
4397             CameraTestUtils.verifyAvailabilityCbsReceived(expectedUnavailableCameras,
4398                     unavailableEventQueue, null, /*available*/ false);
4399             Set<String> unavailablePhysicalCamerasWhileOpen = getUnavailablePhysicalCamerasAndDrain(
4400                     unavailablePhysicalCamEventQueue, cameraId);
4401             if (expectInitialCallbackAfterOpen) {
4402                 assertTrue("The unavailable physical cameras must be the same between before open "
4403                         + unavailablePhysicalCameras.toString()  + " and after open "
4404                         + unavailablePhysicalCamerasWhileOpen.toString(),
4405                         unavailablePhysicalCameras.equals(unavailablePhysicalCamerasWhileOpen));
4406             } else {
4407                 assertTrue("The physical camera unavailability callback must not be called when "
4408                         + "the logical camera is open",
4409                         unavailablePhysicalCamerasWhileOpen.isEmpty());
4410             }
4411 
4412             // Close camera device
4413             camera.close();
4414             cameraListener.waitForState(BlockingStateCallback.STATE_CLOSED,
4415                     CameraTestUtils.CAMERA_CLOSE_TIMEOUT_MS);
4416             CameraTestUtils.verifySingleAvailabilityCbsReceived(availableEventQueue,
4417                     unavailableEventQueue, cameraId, "availability", "Unavailability");
4418 
4419             // Get physical unavailable callbacks after opening and closing camera
4420             Set<String> unavailablePhysicalCamerasAfterClose =
4421                     getUnavailablePhysicalCamerasAndDrain(
4422                             unavailablePhysicalCamEventQueue, cameraId);
4423 
4424             assertTrue("The unavailable physical cameras must be the same between before open "
4425                     + unavailablePhysicalCameras.toString()  + " and after close "
4426                     + unavailablePhysicalCamerasAfterClose.toString(),
4427                     unavailablePhysicalCameras.equals(unavailablePhysicalCamerasAfterClose));
4428 
4429             manager.unregisterAvailabilityCallback(ac);
4430         }
4431 
4432     }
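    // Hypothetical invocation sketch: a CTS test case could run the consistency check above
    // with the camera IDs under test and a handler backed by its own looper thread. The
    // variable names and the expectInitialCallbackAfterOpen value are illustrative assumptions.
    //
    //     String[] idsUnderTest = manager.getCameraIdList();
    //     testPhysicalCameraAvailabilityConsistencyHelper(idsUnderTest, manager, handler,
    //             /*expectInitialCallbackAfterOpen*/ true);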
}