/*
 * Copyright 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.PointF;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.MultiResolutionImageReader;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.cts.helpers.CameraErrorCollector;
import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.params.DynamicRangeProfiles;
import android.hardware.camera2.params.InputConfiguration;
import android.hardware.camera2.params.MandatoryStreamCombination.MandatoryStreamInformation;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.MultiResolutionStreamConfigurationMap;
import android.hardware.camera2.params.MultiResolutionStreamInfo;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.SessionConfiguration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.hardware.cts.helpers.CameraUtils;
import android.location.Location;
import android.location.LocationManager;
import android.media.ExifInterface;
import android.media.Image;
import android.media.Image.Plane;
import android.media.ImageReader;
import android.media.ImageWriter;
import android.os.Build;
import android.os.ConditionVariable;
import android.os.Handler;
import android.util.Log;
import android.util.Pair;
import android.util.Range;
import android.util.Size;
import android.view.Surface;
import android.view.WindowManager;
import android.view.WindowMetrics;

import androidx.annotation.NonNull;

import com.android.ex.camera2.blocking.BlockingCameraManager;
import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
import com.android.ex.camera2.blocking.BlockingSessionCallback;
import com.android.ex.camera2.blocking.BlockingStateCallback;
import com.android.ex.camera2.exceptions.TimeoutRuntimeException;

import junit.framework.Assert;

import org.mockito.Mockito;

import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

/**
 * A package-private utility class that wraps up common utility functions for the camera2 CTS
 * tests.
 */
public class CameraTestUtils extends Assert {
    private static final String TAG = "CameraTestUtils";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
    public static final Size SIZE_BOUND_720P = new Size(1280, 720);
    public static final Size SIZE_BOUND_1080P = new Size(1920, 1088);
    public static final Size SIZE_BOUND_2K = new Size(2048, 1088);
    public static final Size SIZE_BOUND_QHD = new Size(2560, 1440);
    public static final Size SIZE_BOUND_2160P = new Size(3840, 2160);
    // Only test preview sizes that are no larger than 1080p.
    public static final Size PREVIEW_SIZE_BOUND = SIZE_BOUND_1080P;
    // Default timeouts for reaching various states
    public static final int CAMERA_OPEN_TIMEOUT_MS = 3000;
    public static final int CAMERA_CLOSE_TIMEOUT_MS = 3000;
    public static final int CAMERA_IDLE_TIMEOUT_MS = 3000;
    public static final int CAMERA_ACTIVE_TIMEOUT_MS = 1000;
    public static final int CAMERA_BUSY_TIMEOUT_MS = 1000;
    public static final int CAMERA_UNCONFIGURED_TIMEOUT_MS = 1000;
    public static final int CAMERA_CONFIGURE_TIMEOUT_MS = 3000;
    public static final int CAPTURE_RESULT_TIMEOUT_MS = 3000;
    public static final int CAPTURE_IMAGE_TIMEOUT_MS = 3000;

    public static final int SESSION_CONFIGURE_TIMEOUT_MS = 3000;
    public static final int SESSION_CLOSE_TIMEOUT_MS = 3000;
    public static final int SESSION_READY_TIMEOUT_MS = 5000;
    public static final int SESSION_ACTIVE_TIMEOUT_MS = 1000;

    public static final int MAX_READER_IMAGES = 5;

    public static final int INDEX_ALGORITHM_AE = 0;
    public static final int INDEX_ALGORITHM_AWB = 1;
    public static final int INDEX_ALGORITHM_AF = 2;
    public static final int NUM_ALGORITHMS = 3; // AE, AWB and AF

    // Compensate for the loss of "sensitivity" and "sensitivityBoost"
    public static final int MAX_ISO_MISMATCH = 3;

    public static final String OFFLINE_CAMERA_ID = "offline_camera_id";
    public static final String REPORT_LOG_NAME = "CtsCameraTestCases";

    private static final int EXIF_DATETIME_LENGTH = 19;
    private static final int EXIF_DATETIME_ERROR_MARGIN_SEC = 60;
    private static final float EXIF_FOCAL_LENGTH_ERROR_MARGIN = 0.001f;
    private static final float EXIF_EXPOSURE_TIME_ERROR_MARGIN_RATIO = 0.05f;
    private static final float EXIF_EXPOSURE_TIME_MIN_ERROR_MARGIN_SEC = 0.002f;
    private static final float EXIF_APERTURE_ERROR_MARGIN = 0.001f;

    private static final float ZOOM_RATIO_THRESHOLD = 0.01f;

    private static final Location sTestLocation0 = new Location(LocationManager.GPS_PROVIDER);
    private static final Location sTestLocation1 = new Location(LocationManager.GPS_PROVIDER);
    private static final Location sTestLocation2 = new Location(LocationManager.NETWORK_PROVIDER);

    static {
        sTestLocation0.setTime(1199145600000L);
        sTestLocation0.setLatitude(37.736071);
        sTestLocation0.setLongitude(-122.441983);
        sTestLocation0.setAltitude(21.0);

        sTestLocation1.setTime(1199145601000L);
        sTestLocation1.setLatitude(0.736071);
        sTestLocation1.setLongitude(0.441983);
        sTestLocation1.setAltitude(1.0);

        sTestLocation2.setTime(1199145602000L);
        sTestLocation2.setLatitude(-89.736071);
        sTestLocation2.setLongitude(-179.441983);
        sTestLocation2.setAltitude(100000.0);
    }

    // Exif test data vectors.
    public static final ExifTestData[] EXIF_TEST_DATA = {
            new ExifTestData(
                    /*gpsLocation*/ sTestLocation0,
                    /* orientation */ 90,
                    /* jpgQuality */ (byte) 80,
                    /* thumbQuality */ (byte) 75),
            new ExifTestData(
                    /*gpsLocation*/ sTestLocation1,
                    /* orientation */ 180,
                    /* jpgQuality */ (byte) 90,
                    /* thumbQuality */ (byte) 85),
            new ExifTestData(
                    /*gpsLocation*/ sTestLocation2,
                    /* orientation */ 270,
                    /* jpgQuality */ (byte) 100,
                    /* thumbQuality */ (byte) 80)
    };

    /**
     * Create an {@link android.media.ImageReader} object and get the surface.
     *
     * @param size The size of this ImageReader to be created.
     * @param format The format of this ImageReader to be created.
     * @param maxNumImages The max number of images that can be acquired simultaneously.
     * @param listener The listener used by this ImageReader to notify callbacks.
     * @param handler The handler to use for any listener callbacks.
     */
    public static ImageReader makeImageReader(Size size, int format, int maxNumImages,
            ImageReader.OnImageAvailableListener listener, Handler handler) {
        ImageReader reader = ImageReader.newInstance(size.getWidth(), size.getHeight(), format,
                maxNumImages);
        reader.setOnImageAvailableListener(listener, handler);
        if (VERBOSE) Log.v(TAG, "Created ImageReader size " + size);
        return reader;
    }
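
    // Illustrative usage sketch (not part of the original utilities): creating a YUV reader and
    // waiting for one frame via the SimpleImageReaderListener defined below. The size, format
    // and handler here are assumptions for the example only.
    //
    //     SimpleImageReaderListener readerListener = new SimpleImageReaderListener();
    //     ImageReader reader = CameraTestUtils.makeImageReader(
    //             new Size(640, 480), ImageFormat.YUV_420_888, MAX_READER_IMAGES,
    //             readerListener, handler);
    //     // ... add reader.getSurface() to a capture session and submit a request ...
    //     Image image = readerListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
    //     image.close();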

    /**
     * Create an ImageWriter and hook up the ImageListener.
     *
     * @param inputSurface The input surface of the ImageWriter.
     * @param maxImages The max number of Images that can be dequeued simultaneously.
     * @param listener The listener used by this ImageWriter to notify callbacks
     * @param handler The handler to post listener callbacks.
     * @return ImageWriter object created.
     */
    public static ImageWriter makeImageWriter(
            Surface inputSurface, int maxImages,
            ImageWriter.OnImageReleasedListener listener, Handler handler) {
        ImageWriter writer = ImageWriter.newInstance(inputSurface, maxImages);
        writer.setOnImageReleasedListener(listener, handler);
        return writer;
    }
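
    // Illustrative usage sketch (assumption, not part of the original utilities): wiring an
    // ImageWriter to the input surface of a reprocessable session. "session" and
    // "imageToReprocess" are placeholders for this example.
    //
    //     ImageWriter writer = CameraTestUtils.makeImageWriter(
    //             session.getInputSurface(), /*maxImages*/ 2,
    //             (w) -> Log.v(TAG, "input image released"), handler);
    //     writer.queueInputImage(imageToReprocess);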

    /**
     * Utility class to store the targets for the mandatory stream combination tests.
     */
    public static class StreamCombinationTargets {
        public List<SurfaceTexture> mPrivTargets = new ArrayList<>();
        public List<ImageReader> mJpegTargets = new ArrayList<>();
        public List<ImageReader> mYuvTargets = new ArrayList<>();
        public List<ImageReader> mY8Targets = new ArrayList<>();
        public List<ImageReader> mRawTargets = new ArrayList<>();
        public List<ImageReader> mHeicTargets = new ArrayList<>();
        public List<ImageReader> mDepth16Targets = new ArrayList<>();
        public List<ImageReader> mP010Targets = new ArrayList<>();

        public List<MultiResolutionImageReader> mPrivMultiResTargets = new ArrayList<>();
        public List<MultiResolutionImageReader> mJpegMultiResTargets = new ArrayList<>();
        public List<MultiResolutionImageReader> mYuvMultiResTargets = new ArrayList<>();
        public List<MultiResolutionImageReader> mRawMultiResTargets = new ArrayList<>();

        public void close() {
            for (SurfaceTexture target : mPrivTargets) {
                target.release();
            }
            for (ImageReader target : mJpegTargets) {
                target.close();
            }
            for (ImageReader target : mYuvTargets) {
                target.close();
            }
            for (ImageReader target : mY8Targets) {
                target.close();
            }
            for (ImageReader target : mRawTargets) {
                target.close();
            }
            for (ImageReader target : mHeicTargets) {
                target.close();
            }
            for (ImageReader target : mDepth16Targets) {
                target.close();
            }
            for (ImageReader target : mP010Targets) {
                target.close();
            }

            for (MultiResolutionImageReader target : mPrivMultiResTargets) {
                target.close();
            }
            for (MultiResolutionImageReader target : mJpegMultiResTargets) {
                target.close();
            }
            for (MultiResolutionImageReader target : mYuvMultiResTargets) {
                target.close();
            }
            for (MultiResolutionImageReader target : mRawMultiResTargets) {
                target.close();
            }
        }
    }

    private static void configureTarget(StreamCombinationTargets targets,
            List<OutputConfiguration> outputConfigs, List<Surface> outputSurfaces,
            int format, Size targetSize, int numBuffers, String overridePhysicalCameraId,
            MultiResolutionStreamConfigurationMap multiResStreamConfig,
            boolean createMultiResStreamConfig, ImageDropperListener listener, Handler handler,
            long dynamicRangeProfile, long streamUseCase) {
        if (createMultiResStreamConfig) {
            Collection<MultiResolutionStreamInfo> multiResolutionStreams =
                    multiResStreamConfig.getOutputInfo(format);
            MultiResolutionImageReader multiResReader = new MultiResolutionImageReader(
                    multiResolutionStreams, format, numBuffers);
            multiResReader.setOnImageAvailableListener(listener, new HandlerExecutor(handler));
            Collection<OutputConfiguration> configs =
                    OutputConfiguration.createInstancesForMultiResolutionOutput(multiResReader);
            outputConfigs.addAll(configs);
            outputSurfaces.add(multiResReader.getSurface());
            switch (format) {
                case ImageFormat.PRIVATE:
                    targets.mPrivMultiResTargets.add(multiResReader);
                    break;
                case ImageFormat.JPEG:
                    targets.mJpegMultiResTargets.add(multiResReader);
                    break;
                case ImageFormat.YUV_420_888:
                    targets.mYuvMultiResTargets.add(multiResReader);
                    break;
                case ImageFormat.RAW_SENSOR:
                    targets.mRawMultiResTargets.add(multiResReader);
                    break;
                default:
                    fail("Unknown/Unsupported output format " + format);
            }
        } else {
            if (format == ImageFormat.PRIVATE) {
                SurfaceTexture target = new SurfaceTexture(/*random int*/1);
                target.setDefaultBufferSize(targetSize.getWidth(), targetSize.getHeight());
                OutputConfiguration config = new OutputConfiguration(new Surface(target));
                if (overridePhysicalCameraId != null) {
                    config.setPhysicalCameraId(overridePhysicalCameraId);
                }
                config.setDynamicRangeProfile(dynamicRangeProfile);
                config.setStreamUseCase(streamUseCase);
                outputConfigs.add(config);
                outputSurfaces.add(config.getSurface());
                targets.mPrivTargets.add(target);
            } else {
                ImageReader target = ImageReader.newInstance(targetSize.getWidth(),
                        targetSize.getHeight(), format, numBuffers);
                target.setOnImageAvailableListener(listener, handler);
                OutputConfiguration config = new OutputConfiguration(target.getSurface());
                if (overridePhysicalCameraId != null) {
                    config.setPhysicalCameraId(overridePhysicalCameraId);
                }
                config.setDynamicRangeProfile(dynamicRangeProfile);
                config.setStreamUseCase(streamUseCase);
                outputConfigs.add(config);
                outputSurfaces.add(config.getSurface());

                switch (format) {
                    case ImageFormat.JPEG:
                        targets.mJpegTargets.add(target);
                        break;
                    case ImageFormat.YUV_420_888:
                        targets.mYuvTargets.add(target);
                        break;
                    case ImageFormat.Y8:
                        targets.mY8Targets.add(target);
                        break;
                    case ImageFormat.RAW_SENSOR:
                        targets.mRawTargets.add(target);
                        break;
                    case ImageFormat.HEIC:
                        targets.mHeicTargets.add(target);
                        break;
                    case ImageFormat.DEPTH16:
                        targets.mDepth16Targets.add(target);
                        break;
                    case ImageFormat.YCBCR_P010:
                        targets.mP010Targets.add(target);
                        break;
                    default:
                        fail("Unknown/Unsupported output format " + format);
                }
            }
        }
    }

    public static void setupConfigurationTargets(List<MandatoryStreamInformation> streamsInfo,
            StreamCombinationTargets targets,
            List<OutputConfiguration> outputConfigs,
            List<Surface> outputSurfaces, int numBuffers,
            boolean substituteY8, boolean substituteHeic, String overridePhysicalCameraId,
            MultiResolutionStreamConfigurationMap multiResStreamConfig, Handler handler) {
        List<Surface> uhSurfaces = new ArrayList<Surface>();
        setupConfigurationTargets(streamsInfo, targets, outputConfigs, outputSurfaces, uhSurfaces,
                numBuffers, substituteY8, substituteHeic, overridePhysicalCameraId,
                multiResStreamConfig, handler);
    }

    public static void setupConfigurationTargets(List<MandatoryStreamInformation> streamsInfo,
            StreamCombinationTargets targets,
            List<OutputConfiguration> outputConfigs,
            List<Surface> outputSurfaces, List<Surface> uhSurfaces, int numBuffers,
            boolean substituteY8, boolean substituteHeic, String overridePhysicalCameraId,
            MultiResolutionStreamConfigurationMap multiResStreamConfig, Handler handler) {
        setupConfigurationTargets(streamsInfo, targets, outputConfigs, outputSurfaces, uhSurfaces,
                numBuffers, substituteY8, substituteHeic, overridePhysicalCameraId,
                multiResStreamConfig, handler, /*dynamicRangeProfiles*/ null);
    }

    public static void setupConfigurationTargets(List<MandatoryStreamInformation> streamsInfo,
            StreamCombinationTargets targets,
            List<OutputConfiguration> outputConfigs,
            List<Surface> outputSurfaces, List<Surface> uhSurfaces, int numBuffers,
            boolean substituteY8, boolean substituteHeic, String overridePhysicalCameraId,
            MultiResolutionStreamConfigurationMap multiResStreamConfig, Handler handler,
            List<Long> dynamicRangeProfiles) {

        Random rnd = new Random();
        // 10-bit capable streams use a fixed dynamic range profile when
        // dynamicRangeProfiles.size() == 1, or a randomly chosen one when
        // dynamicRangeProfiles.size() > 1.
        boolean use10BitRandomProfile = (dynamicRangeProfiles != null) &&
                (dynamicRangeProfiles.size() > 1);
        if (use10BitRandomProfile) {
            long seed = rnd.nextLong();
            Log.i(TAG, "Random seed used for selecting 10-bit output: " + seed);
            rnd.setSeed(seed);
        }
        ImageDropperListener imageDropperListener = new ImageDropperListener();
        List<Surface> chosenSurfaces;
        for (MandatoryStreamInformation streamInfo : streamsInfo) {
            if (streamInfo.isInput()) {
                continue;
            }
            chosenSurfaces = outputSurfaces;
            if (streamInfo.isUltraHighResolution()) {
                chosenSurfaces = uhSurfaces;
            }
            int format = streamInfo.getFormat();
            if (substituteY8 && (format == ImageFormat.YUV_420_888)) {
                format = ImageFormat.Y8;
            } else if (substituteHeic && (format == ImageFormat.JPEG)) {
                format = ImageFormat.HEIC;
            }

            long dynamicRangeProfile = DynamicRangeProfiles.STANDARD;
            if (streamInfo.is10BitCapable() && use10BitRandomProfile) {
                boolean override10bit = rnd.nextBoolean();
                if (!override10bit) {
                    dynamicRangeProfile = dynamicRangeProfiles.get(rnd.nextInt(
                            dynamicRangeProfiles.size()));
                    format = streamInfo.get10BitFormat();
                }
            } else if (streamInfo.is10BitCapable() && (dynamicRangeProfiles != null)) {
                dynamicRangeProfile = dynamicRangeProfiles.get(0);
                format = streamInfo.get10BitFormat();
            }
            Size[] availableSizes = new Size[streamInfo.getAvailableSizes().size()];
            availableSizes = streamInfo.getAvailableSizes().toArray(availableSizes);
            Size targetSize = CameraTestUtils.getMaxSize(availableSizes);
            boolean createMultiResReader =
                    (multiResStreamConfig != null &&
                     !multiResStreamConfig.getOutputInfo(format).isEmpty() &&
                     streamInfo.isMaximumSize());
            switch (format) {
                case ImageFormat.PRIVATE:
                case ImageFormat.JPEG:
                case ImageFormat.YUV_420_888:
                case ImageFormat.YCBCR_P010:
                case ImageFormat.Y8:
                case ImageFormat.HEIC:
                case ImageFormat.DEPTH16:
                {
                    configureTarget(targets, outputConfigs, chosenSurfaces, format,
                            targetSize, numBuffers, overridePhysicalCameraId, multiResStreamConfig,
                            createMultiResReader, imageDropperListener, handler,
                            dynamicRangeProfile, streamInfo.getStreamUseCase());
                    break;
                }
                case ImageFormat.RAW_SENSOR: {
                    // targetSize could be null in the logical camera case where only a
                    // physical camera supports the RAW stream.
                    if (targetSize != null) {
                        configureTarget(targets, outputConfigs, chosenSurfaces, format,
                                targetSize, numBuffers, overridePhysicalCameraId,
                                multiResStreamConfig, createMultiResReader, imageDropperListener,
                                handler, dynamicRangeProfile, streamInfo.getStreamUseCase());
                    }
                    break;
                }
                default:
                    fail("Unknown output format " + format);
            }
        }
    }
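
    // Illustrative usage sketch (assumption, not part of the original utilities): turning a
    // mandatory stream combination into output configurations and a capture session.
    // "combination", "cameraDevice" and "sessionListener" are placeholders for this example.
    //
    //     StreamCombinationTargets targets = new StreamCombinationTargets();
    //     List<OutputConfiguration> outputConfigs = new ArrayList<>();
    //     List<Surface> outputSurfaces = new ArrayList<>();
    //     setupConfigurationTargets(combination.getStreamsInformation(), targets, outputConfigs,
    //             outputSurfaces, MAX_READER_IMAGES, /*substituteY8*/ false,
    //             /*substituteHeic*/ false, /*overridePhysicalCameraId*/ null,
    //             /*multiResStreamConfig*/ null, handler);
    //     SessionConfiguration sessionConfig = new SessionConfiguration(
    //             SessionConfiguration.SESSION_REGULAR, outputConfigs,
    //             new HandlerExecutor(handler), sessionListener);
    //     cameraDevice.createCaptureSession(sessionConfig);
    //     // ... submit capture requests against outputSurfaces, then ...
    //     targets.close();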

    /**
     * Close pending images and clean up an {@link android.media.ImageReader} object.
     * @param reader an {@link android.media.ImageReader} to close.
     */
    public static void closeImageReader(ImageReader reader) {
        if (reader != null) {
            reader.close();
        }
    }

    /**
     * Close the pending images and then close the currently active {@link ImageReader} objects.
     */
    public static void closeImageReaders(ImageReader[] readers) {
        if ((readers != null) && (readers.length > 0)) {
            for (ImageReader reader : readers) {
                CameraTestUtils.closeImageReader(reader);
            }
        }
    }

    /**
     * Close pending images and clean up an {@link android.media.ImageWriter} object.
     * @param writer an {@link android.media.ImageWriter} to close.
     */
    public static void closeImageWriter(ImageWriter writer) {
        if (writer != null) {
            writer.close();
        }
    }

    /**
     * Dummy listener that releases the image immediately once it is available.
     *
     * <p>
     * It can be used for the case where we don't care about the image data at all.
     * </p>
     */
    public static class ImageDropperListener implements ImageReader.OnImageAvailableListener {
        @Override
        public synchronized void onImageAvailable(ImageReader reader) {
            Image image = null;
            try {
                image = reader.acquireNextImage();
            } finally {
                if (image != null) {
                    image.close();
                    mImagesDropped++;
                }
            }
        }

        public synchronized int getImageCount() {
            return mImagesDropped;
        }

        public synchronized void resetImageCount() {
            mImagesDropped = 0;
        }

        private int mImagesDropped = 0;
    }
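
    // Illustrative usage sketch (assumption, not part of the original utilities): an
    // ImageDropperListener is handy when a stream only needs to be kept flowing, e.g. a dummy
    // preview target whose frames are discarded as soon as they arrive.
    //
    //     ImageDropperListener dropper = new ImageDropperListener();
    //     ImageReader previewReader = CameraTestUtils.makeImageReader(
    //             PREVIEW_SIZE_BOUND, ImageFormat.YUV_420_888, MAX_READER_IMAGES,
    //             dropper, handler);
    //     // ... stream frames into previewReader.getSurface() ...
    //     int framesSeen = dropper.getImageCount();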

    /**
     * Image listener that releases the image immediately after validating it.
     */
    public static class ImageVerifierListener implements ImageReader.OnImageAvailableListener {
        private Size mSize;
        private int mFormat;
        // Whether the parent ImageReader is valid or not. If the parent ImageReader
        // is destroyed, the acquired Image may become invalid.
        private boolean mReaderIsValid;

        public ImageVerifierListener(Size sz, int format) {
            mSize = sz;
            mFormat = format;
            mReaderIsValid = true;
        }

        public synchronized void onReaderDestroyed() {
            mReaderIsValid = false;
        }

        @Override
        public synchronized void onImageAvailable(ImageReader reader) {
            Image image = null;
            try {
                image = reader.acquireNextImage();
            } finally {
                if (image != null) {
                    // Should only do some quick validity checks in the callback, as the
                    // ImageReader could be closed asynchronously, which will close all images
                    // acquired from this ImageReader.
                    checkImage(image, mSize.getWidth(), mSize.getHeight(), mFormat);
                    // checkAndroidImageFormat calls into the underlying Image object, which could
                    // become invalid if the ImageReader is destroyed.
                    if (mReaderIsValid) {
                        checkAndroidImageFormat(image);
                    }
                    image.close();
                }
            }
        }
    }

    public static class SimpleImageReaderListener
            implements ImageReader.OnImageAvailableListener {
        private final LinkedBlockingQueue<Image> mQueue =
                new LinkedBlockingQueue<Image>();
        // Indicates whether this listener will drop images when the number of queued images
        // reaches the reader's maxImages.
        private final boolean mAsyncMode;
        // Max number of images held by the queue in async mode.
        private final int mMaxImages;

        /**
         * Create a synchronous SimpleImageReaderListener that queues the images
         * automatically when they are available; no image will be dropped. If
         * the caller doesn't call getImage(), the producer will eventually run
         * into buffer starvation.
         */
        public SimpleImageReaderListener() {
            mAsyncMode = false;
            mMaxImages = 0;
        }

        /**
         * Create a synchronous/asynchronous SimpleImageReaderListener that
         * queues the images automatically when they are available. For an
         * asynchronous listener, images will be dropped once the number of
         * queued images reaches maxImages; if the caller doesn't call getImage(),
         * the producer will not be blocked. For a synchronous listener, no image
         * will be dropped; if the caller doesn't call getImage(), the producer
         * will eventually run into buffer starvation.
         *
         * @param asyncMode Whether the listener operates in asynchronous mode.
         * @param maxImages The max number of images held by this listener.
         */
        public SimpleImageReaderListener(boolean asyncMode, int maxImages) {
            mAsyncMode = asyncMode;
            mMaxImages = maxImages;
        }

        @Override
        public void onImageAvailable(ImageReader reader) {
            try {
                Image image = reader.acquireNextImage();
                if (image == null) {
                    return;
                }
                mQueue.put(image);
                if (mAsyncMode && mQueue.size() >= mMaxImages) {
                    Image img = mQueue.poll();
                    img.close();
                }
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onImageAvailable");
            }
        }

        /**
         * Get an image from the image reader.
         *
         * @param timeout Timeout value for the wait.
         * @return The image from the image reader.
         */
        public Image getImage(long timeout) throws InterruptedException {
            Image image = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
            assertNotNull("Wait for an image timed out in " + timeout + "ms", image);
            return image;
        }

        /**
         * Drain the pending images currently held by this listener.
         */
        public void drain() {
            while (!mQueue.isEmpty()) {
                Image image = mQueue.poll();
                assertNotNull("Unable to get an image", image);
                image.close();
            }
        }
    }
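
    // Illustrative usage sketch (assumption, not part of the original utilities): an asynchronous
    // SimpleImageReaderListener that keeps at most two images queued so a slow consumer never
    // blocks the producer. "reader" and "handler" are placeholders for this example.
    //
    //     SimpleImageReaderListener asyncListener =
    //             new SimpleImageReaderListener(/*asyncMode*/ true, /*maxImages*/ 2);
    //     reader.setOnImageAvailableListener(asyncListener, handler);
    //     // ... later, consume whatever is available and drop the rest ...
    //     Image latest = asyncListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
    //     latest.close();
    //     asyncListener.drain();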

    public static class SimpleImageWriterListener implements ImageWriter.OnImageReleasedListener {
        private final Semaphore mImageReleasedSema = new Semaphore(0);
        private final ImageWriter mWriter;
        @Override
        public void onImageReleased(ImageWriter writer) {
            if (writer != mWriter) {
                return;
            }

            if (VERBOSE) {
                Log.v(TAG, "Input image is released");
            }
            mImageReleasedSema.release();
        }

        public SimpleImageWriterListener(ImageWriter writer) {
            if (writer == null) {
                throw new IllegalArgumentException("writer cannot be null");
            }
            mWriter = writer;
        }

        public void waitForImageReleased(long timeoutMs) throws InterruptedException {
            if (!mImageReleasedSema.tryAcquire(timeoutMs, TimeUnit.MILLISECONDS)) {
                fail("wait for image available timed out after " + timeoutMs + "ms");
            }
        }
    }

    public static class ImageAndMultiResStreamInfo {
        public final Image image;
        public final MultiResolutionStreamInfo streamInfo;

        public ImageAndMultiResStreamInfo(Image image, MultiResolutionStreamInfo streamInfo) {
            this.image = image;
            this.streamInfo = streamInfo;
        }
    }

    public static class SimpleMultiResolutionImageReaderListener
            implements ImageReader.OnImageAvailableListener {
        public SimpleMultiResolutionImageReaderListener(MultiResolutionImageReader owner,
                int maxBuffers, boolean acquireLatest) {
            mOwner = owner;
            mMaxBuffers = maxBuffers;
            mAcquireLatest = acquireLatest;
        }

        @Override
        public void onImageAvailable(ImageReader reader) {
            if (VERBOSE) Log.v(TAG, "new image available from reader " + reader.toString());

            if (mAcquireLatest) {
                synchronized (mLock) {
                    // If the image reader has switched, acquire and release any pending image
                    // from the previous image reader.
                    if (mLastReader != reader) {
                        if (mLastReader != null) {
                            Image image = mLastReader.acquireLatestImage();
                            if (image != null) {
                                image.close();
                            }
                        }
                        mLastReader = reader;
                    }
                }
                mImageAvailable.open();
            } else {
                if (mQueue.size() < mMaxBuffers) {
                    Image image = reader.acquireNextImage();
                    MultiResolutionStreamInfo multiResStreamInfo =
                            mOwner.getStreamInfoForImageReader(reader);
                    mQueue.offer(new ImageAndMultiResStreamInfo(image, multiResStreamInfo));
                }
            }
        }

        public ImageAndMultiResStreamInfo getAnyImageAndInfoAvailable(long timeoutMs)
                throws Exception {
            if (mAcquireLatest) {
                Image image = null;
                if (mImageAvailable.block(timeoutMs)) {
                    synchronized (mLock) {
                        if (mLastReader != null) {
                            image = mLastReader.acquireLatestImage();
                            if (VERBOSE) Log.v(TAG, "acquireLatestImage from "
                                    + mLastReader.toString() + " produces " + image);
                        } else {
                            fail("invalid image reader");
                        }
                    }
                    mImageAvailable.close();
                } else {
                    fail("wait for image available timed out after " + timeoutMs + "ms");
                }
                return image == null ? null : new ImageAndMultiResStreamInfo(image,
                        mOwner.getStreamInfoForImageReader(mLastReader));
            } else {
                ImageAndMultiResStreamInfo imageAndInfo = mQueue.poll(timeoutMs,
                        java.util.concurrent.TimeUnit.MILLISECONDS);
                if (imageAndInfo == null) {
                    fail("wait for image available timed out after " + timeoutMs + "ms");
                }
                return imageAndInfo;
            }
        }

        public void reset() {
            while (!mQueue.isEmpty()) {
                ImageAndMultiResStreamInfo imageAndInfo = mQueue.poll();
                assertNotNull("Acquired image is not valid", imageAndInfo.image);
                imageAndInfo.image.close();
            }
            mImageAvailable.close();
            mLastReader = null;
        }

        private LinkedBlockingQueue<ImageAndMultiResStreamInfo> mQueue =
                new LinkedBlockingQueue<ImageAndMultiResStreamInfo>();
        private final MultiResolutionImageReader mOwner;
        private final int mMaxBuffers;
        private final boolean mAcquireLatest;
        private ConditionVariable mImageAvailable = new ConditionVariable();
        private ImageReader mLastReader = null;
        private final Object mLock = new Object();
    }
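
    // Illustrative usage sketch (assumption, not part of the original utilities): draining a
    // MultiResolutionImageReader through the listener above. "multiResReader" and "handler" are
    // placeholders; the reader is assumed to have been created from the camera's
    // MultiResolutionStreamConfigurationMap.
    //
    //     SimpleMultiResolutionImageReaderListener multiResListener =
    //             new SimpleMultiResolutionImageReaderListener(
    //                     multiResReader, /*maxBuffers*/ MAX_READER_IMAGES, /*acquireLatest*/ false);
    //     multiResReader.setOnImageAvailableListener(multiResListener, new HandlerExecutor(handler));
    //     ImageAndMultiResStreamInfo imageAndInfo =
    //             multiResListener.getAnyImageAndInfoAvailable(CAPTURE_IMAGE_TIMEOUT_MS);
    //     Log.v(TAG, "Got image for stream " + imageAndInfo.streamInfo);
    //     imageAndInfo.image.close();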

    public static class SimpleCaptureCallback extends CameraCaptureSession.CaptureCallback {
        private final LinkedBlockingQueue<TotalCaptureResult> mQueue =
                new LinkedBlockingQueue<TotalCaptureResult>();
        private final LinkedBlockingQueue<CaptureFailure> mFailureQueue =
                new LinkedBlockingQueue<>();
        // (Surface, frame number) pairs for lost buffers
        private final LinkedBlockingQueue<Pair<Surface, Long>> mBufferLostQueue =
                new LinkedBlockingQueue<>();
        private final LinkedBlockingQueue<Integer> mAbortQueue =
                new LinkedBlockingQueue<>();
        // Pair<CaptureRequest, Long> is a pair of capture request and timestamp.
        private final LinkedBlockingQueue<Pair<CaptureRequest, Long>> mCaptureStartQueue =
                new LinkedBlockingQueue<>();
        // Pair<Integer, Long> is a pair of sequence id and frame number.
        private final LinkedBlockingQueue<Pair<Integer, Long>> mCaptureSequenceCompletedQueue =
                new LinkedBlockingQueue<>();

        private AtomicLong mNumFramesArrived = new AtomicLong(0);

        @Override
        public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
                long timestamp, long frameNumber) {
            try {
                mCaptureStartQueue.put(new Pair(request, timestamp));
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureStarted");
            }
        }

        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                TotalCaptureResult result) {
            try {
                mNumFramesArrived.incrementAndGet();
                mQueue.put(result);
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureCompleted");
            }
        }

        @Override
        public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
                CaptureFailure failure) {
            try {
                mFailureQueue.put(failure);
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureFailed");
            }
        }

        @Override
        public void onCaptureSequenceAborted(CameraCaptureSession session, int sequenceId) {
            try {
                mAbortQueue.put(sequenceId);
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureSequenceAborted");
            }
        }

        @Override
        public void onCaptureSequenceCompleted(CameraCaptureSession session, int sequenceId,
                long frameNumber) {
            try {
                mCaptureSequenceCompletedQueue.put(new Pair(sequenceId, frameNumber));
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureSequenceCompleted");
            }
        }

        @Override
        public void onCaptureBufferLost(CameraCaptureSession session,
                CaptureRequest request, Surface target, long frameNumber) {
            try {
                mBufferLostQueue.put(new Pair<>(target, frameNumber));
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureBufferLost");
            }
        }

        public long getTotalNumFrames() {
            return mNumFramesArrived.get();
        }

        public CaptureResult getCaptureResult(long timeout) {
            return getTotalCaptureResult(timeout);
        }

        public TotalCaptureResult getCaptureResult(long timeout, long timestamp) {
            try {
                long currentTs = -1L;
                TotalCaptureResult result;
                while (true) {
                    result = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
                    if (result == null) {
                        throw new RuntimeException(
                                "Wait for a capture result timed out in " + timeout + "ms");
                    }
                    currentTs = result.get(CaptureResult.SENSOR_TIMESTAMP);
                    if (currentTs == timestamp) {
                        return result;
                    }
                }

            } catch (InterruptedException e) {
                throw new UnsupportedOperationException("Unhandled interrupted exception", e);
            }
        }

        public TotalCaptureResult getTotalCaptureResult(long timeout) {
            try {
                TotalCaptureResult result = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
                assertNotNull("Wait for a capture result timed out in " + timeout + "ms", result);
                return result;
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException("Unhandled interrupted exception", e);
            }
        }

        /**
         * Get the {@link #CaptureResult capture result} for a given
         * {@link #CaptureRequest capture request}.
         *
         * @param myRequest The {@link #CaptureRequest capture request} whose
         *            corresponding {@link #CaptureResult capture result} was
         *            being waited for
         * @param numResultsWait Number of frames to wait for the capture result
         *            before timeout.
         * @throws TimeoutRuntimeException If more than numResultsWait results are
         *            seen before the result matching myRequest arrives, or each
         *            individual wait for result times out after
         *            {@value #CAPTURE_RESULT_TIMEOUT_MS}ms.
         */
        public CaptureResult getCaptureResultForRequest(CaptureRequest myRequest,
                int numResultsWait) {
            return getTotalCaptureResultForRequest(myRequest, numResultsWait);
        }

        /**
         * Get the {@link #TotalCaptureResult total capture result} for a given
         * {@link #CaptureRequest capture request}.
         *
         * @param myRequest The {@link #CaptureRequest capture request} whose
         *            corresponding {@link #TotalCaptureResult capture result} was
         *            being waited for
         * @param numResultsWait Number of frames to wait for the capture result
         *            before timeout.
         * @throws TimeoutRuntimeException If more than numResultsWait results are
         *            seen before the result matching myRequest arrives, or each
         *            individual wait for result times out after
         *            {@value #CAPTURE_RESULT_TIMEOUT_MS}ms.
         */
        public TotalCaptureResult getTotalCaptureResultForRequest(CaptureRequest myRequest,
                int numResultsWait) {
            ArrayList<CaptureRequest> captureRequests = new ArrayList<>(1);
            captureRequests.add(myRequest);
            return getTotalCaptureResultsForRequests(captureRequests, numResultsWait)[0];
        }

        /**
         * Get an array of {@link #TotalCaptureResult total capture results} for a given list of
         * {@link #CaptureRequest capture requests}. This can be used when the order of results
         * may not be the same as the order of requests.
         *
         * @param captureRequests The list of {@link #CaptureRequest capture requests} whose
         *            corresponding {@link #TotalCaptureResult capture results} are
         *            being waited for.
         * @param numResultsWait Number of frames to wait for the capture results
         *            before timeout.
         * @throws TimeoutRuntimeException If more than numResultsWait results are
         *            seen before all the results matching captureRequests arrive.
         */
        public TotalCaptureResult[] getTotalCaptureResultsForRequests(
                List<CaptureRequest> captureRequests, int numResultsWait) {
            if (numResultsWait < 0) {
                throw new IllegalArgumentException("numResultsWait must be no less than 0");
            }
            if (captureRequests == null || captureRequests.size() == 0) {
                throw new IllegalArgumentException("captureRequests must have at least 1 request.");
            }

            // Create a request -> list of result indices map that it will wait for.
            HashMap<CaptureRequest, ArrayList<Integer>> remainingResultIndicesMap = new HashMap<>();
            for (int i = 0; i < captureRequests.size(); i++) {
                CaptureRequest request = captureRequests.get(i);
                ArrayList<Integer> indices = remainingResultIndicesMap.get(request);
                if (indices == null) {
                    indices = new ArrayList<>();
                    remainingResultIndicesMap.put(request, indices);
                }
                indices.add(i);
            }

            TotalCaptureResult[] results = new TotalCaptureResult[captureRequests.size()];
            int i = 0;
            do {
                TotalCaptureResult result = getTotalCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
                CaptureRequest request = result.getRequest();
                ArrayList<Integer> indices = remainingResultIndicesMap.get(request);
                if (indices != null) {
                    results[indices.get(0)] = result;
                    indices.remove(0);

                    // Remove the entry if all results for this request have been fulfilled.
                    if (indices.isEmpty()) {
                        remainingResultIndicesMap.remove(request);
                    }
                }

                if (remainingResultIndicesMap.isEmpty()) {
                    return results;
                }
            } while (i++ < numResultsWait);

            throw new TimeoutRuntimeException("Unable to get the expected capture result after "
                    + "waiting for " + numResultsWait + " results");
        }
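
        // Illustrative usage sketch (assumption, not part of the original utilities): a
        // SimpleCaptureCallback is typically attached to a burst and then queried for the results
        // in request order, even if the camera delivers them out of order. "session" and
        // "burstRequests" are placeholders for this example.
        //
        //     SimpleCaptureCallback captureListener = new SimpleCaptureCallback();
        //     session.captureBurst(burstRequests, captureListener, handler);
        //     TotalCaptureResult[] results = captureListener.getTotalCaptureResultsForRequests(
        //             burstRequests, /*numResultsWait*/ burstRequests.size() * 2);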
1026 
1027         /**
1028          * Get an array list of {@link #CaptureFailure capture failure} with maxNumFailures entries
1029          * at most. If it times out before maxNumFailures failures are received, return the failures
1030          * received so far.
1031          *
1032          * @param maxNumFailures The maximal number of failures to return. If it times out before
1033          *                       the maximal number of failures are received, return the received
1034          *                       failures so far.
1035          * @throws UnsupportedOperationException If an error happens while waiting on the failure.
1036          */
getCaptureFailures(long maxNumFailures)1037         public ArrayList<CaptureFailure> getCaptureFailures(long maxNumFailures) {
1038             ArrayList<CaptureFailure> failures = new ArrayList<>();
1039             try {
1040                 for (int i = 0; i < maxNumFailures; i++) {
1041                     CaptureFailure failure = mFailureQueue.poll(CAPTURE_RESULT_TIMEOUT_MS,
1042                             TimeUnit.MILLISECONDS);
1043                     if (failure == null) {
1044                         // If waiting on a failure times out, return the failures so far.
1045                         break;
1046                     }
1047                     failures.add(failure);
1048                 }
1049             }  catch (InterruptedException e) {
1050                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1051             }
1052 
1053             return failures;
1054         }
1055 
1056         /**
1057          * Get an array list of lost buffers with maxNumLost entries at most.
1058          * If it times out before maxNumLost buffer lost callbacks are received, return the
1059          * lost callbacks received so far.
1060          *
1061          * @param maxNumLost The maximal number of buffer lost failures to return. If it times out
1062          *                   before the maximal number of failures are received, return the received
1063          *                   buffer lost failures so far.
1064          * @throws UnsupportedOperationException If an error happens while waiting on the failure.
1065          */
getLostBuffers(long maxNumLost)1066         public ArrayList<Pair<Surface, Long>> getLostBuffers(long maxNumLost) {
1067             ArrayList<Pair<Surface, Long>> failures = new ArrayList<>();
1068             try {
1069                 for (int i = 0; i < maxNumLost; i++) {
1070                     Pair<Surface, Long> failure = mBufferLostQueue.poll(CAPTURE_RESULT_TIMEOUT_MS,
1071                             TimeUnit.MILLISECONDS);
1072                     if (failure == null) {
1073                         // If waiting on a failure times out, return the failures so far.
1074                         break;
1075                     }
1076                     failures.add(failure);
1077                 }
1078             }  catch (InterruptedException e) {
1079                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1080             }
1081 
1082             return failures;
1083         }
1084 
1085         /**
1086          * Get an array list of aborted capture sequence ids with maxNumAborts entries
1087          * at most. If it times out before maxNumAborts are received, return the aborted sequences
1088          * received so far.
1089          *
1090          * @param maxNumAborts The maximal number of aborted sequences to return. If it times out
1091          *                     before the maximal number of aborts are received, return the received
1092          *                     failed sequences so far.
1093          * @throws UnsupportedOperationException If an error happens while waiting on the failed
1094          *                                       sequences.
1095          */
geAbortedSequences(long maxNumAborts)1096         public ArrayList<Integer> geAbortedSequences(long maxNumAborts) {
1097             ArrayList<Integer> abortList = new ArrayList<>();
1098             try {
1099                 for (int i = 0; i < maxNumAborts; i++) {
1100                     Integer abortSequence = mAbortQueue.poll(CAPTURE_RESULT_TIMEOUT_MS,
1101                             TimeUnit.MILLISECONDS);
1102                     if (abortSequence == null) {
1103                         break;
1104                     }
1105                     abortList.add(abortSequence);
1106                 }
1107             }  catch (InterruptedException e) {
1108                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1109             }
1110 
1111             return abortList;
1112         }
1113 
1114         /**
1115          * Wait until the capture start callback for the given request and expected timestamp
1116          * arrives, or give up after a number of capture start events have been received.
1117          *
1118          * @param request The request for the capture start to wait for.
1119          * @param timestamp The timestamp for the capture start to wait for.
1120          * @param numCaptureStartsWait The number of capture start events to wait for before timing
1121          *                             out.
1122          */
1123         public void waitForCaptureStart(CaptureRequest request, Long timestamp,
1124                 int numCaptureStartsWait) throws Exception {
1125             Pair<CaptureRequest, Long> expectedShutter = new Pair<>(request, timestamp);
1126 
1127             int i = 0;
1128             do {
1129                 Pair<CaptureRequest, Long> shutter = mCaptureStartQueue.poll(
1130                         CAPTURE_RESULT_TIMEOUT_MS, TimeUnit.MILLISECONDS);
1131 
1132                 if (shutter == null) {
1133                     throw new TimeoutRuntimeException("Unable to get any more capture start " +
1134                             "event after waiting for " + CAPTURE_RESULT_TIMEOUT_MS + " ms.");
1135                 } else if (expectedShutter.equals(shutter)) {
1136                     return;
1137                 }
1138 
1139             } while (i++ < numCaptureStartsWait);
1140 
1141             throw new TimeoutRuntimeException("Unable to get the expected capture start " +
1142                     "event after waiting for " + numCaptureStartsWait + " capture starts");
1143         }
1144 
1145         /**
1146          * Wait until the capture sequence completed callback for a given sequence ID arrives.
1147          *
1148          * @param sequenceId The sequence ID of the capture sequence completed callback to wait for.
1149          * @param timeoutMs Time to wait for each capture sequence complete callback before
1150          *                  timing out.
1151          */
1152         public long getCaptureSequenceLastFrameNumber(int sequenceId, long timeoutMs) {
1153             try {
1154                 while (true) {
1155                     Pair<Integer, Long> completedSequence =
1156                             mCaptureSequenceCompletedQueue.poll(timeoutMs, TimeUnit.MILLISECONDS);
1157                     assertNotNull("Wait for a capture sequence completed timed out in " +
1158                             timeoutMs + "ms", completedSequence);
1159 
1160                     if (completedSequence.first.equals(sequenceId)) {
1161                         return completedSequence.second.longValue();
1162                     }
1163                 }
1164             } catch (InterruptedException e) {
1165                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1166             }
1167         }
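        // Example (minimal sketch; "resultListener" stands for an instance of this callback
        // class that was passed to CameraCaptureSession#capture):
        //
        //   int sequenceId = session.capture(request, resultListener, handler);
        //   long lastFrameNumber = resultListener.getCaptureSequenceLastFrameNumber(
        //           sequenceId, CAPTURE_RESULT_TIMEOUT_MS);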
1168 
1169         public boolean hasMoreResults()
1170         {
1171             return !mQueue.isEmpty();
1172         }
1173 
1174         public boolean hasMoreFailures()
1175         {
1176             return !mFailureQueue.isEmpty();
1177         }
1178 
1179         public int getNumLostBuffers()
1180         {
1181             return mBufferLostQueue.size();
1182         }
1183 
1184         public boolean hasMoreAbortedSequences()
1185         {
1186             return !mAbortQueue.isEmpty();
1187         }
1188 
1189         public List<Long> getCaptureStartTimestamps(int count) {
1190             Iterator<Pair<CaptureRequest, Long>> iter = mCaptureStartQueue.iterator();
1191             List<Long> timestamps = new ArrayList<Long>();
1192             try {
1193                 while (timestamps.size() < count) {
1194                     Pair<CaptureRequest, Long> captureStart = mCaptureStartQueue.poll(
1195                             CAPTURE_RESULT_TIMEOUT_MS, TimeUnit.MILLISECONDS);
1196                     assertNotNull("Wait for a capture start timed out in "
1197                             + CAPTURE_RESULT_TIMEOUT_MS + "ms", captureStart);
1198 
1199                     timestamps.add(captureStart.second);
1200                 }
1201                 return timestamps;
1202             } catch (InterruptedException e) {
1203                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1204             }
1205         }
1206 
1207         public void drain() {
1208             mQueue.clear();
1209             mNumFramesArrived.getAndSet(0);
1210             mFailureQueue.clear();
1211             mBufferLostQueue.clear();
1212             mCaptureStartQueue.clear();
1213             mAbortQueue.clear();
1214         }
1215     }
1216 
1217     public static boolean hasCapability(CameraCharacteristics characteristics, int capability) {
1218         int [] capabilities =
1219                 characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
1220         for (int c : capabilities) {
1221             if (c == capability) {
1222                 return true;
1223             }
1224         }
1225         return false;
1226     }
1227 
1228     public static boolean isSystemCamera(CameraManager manager, String cameraId)
1229             throws CameraAccessException {
1230         CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
1231         return hasCapability(characteristics,
1232                 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA);
1233     }
1234 
1235     public static String[] getCameraIdListForTesting(CameraManager manager,
1236             boolean getSystemCameras)
1237             throws CameraAccessException {
1238         String [] ids = manager.getCameraIdListNoLazy();
1239         List<String> idsForTesting = new ArrayList<String>();
1240         for (String id : ids) {
1241             boolean isSystemCamera = isSystemCamera(manager, id);
1242             if (getSystemCameras == isSystemCamera) {
1243                 idsForTesting.add(id);
1244             }
1245         }
1246         return idsForTesting.toArray(new String[idsForTesting.size()]);
1247     }
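    // Example (minimal sketch; "mCameraManager" is a hypothetical test fixture field): iterate
    // over the non-system cameras only, which is what most CTS cases exercise.
    //
    //   String[] publicIds = getCameraIdListForTesting(mCameraManager, /*getSystemCameras*/false);
    //   for (String id : publicIds) {
    //       assertFalse("System camera unexpectedly listed", isSystemCamera(mCameraManager, id));
    //   }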
1248 
1249     public static Set<Set<String>> getConcurrentCameraIds(CameraManager manager,
1250             boolean getSystemCameras)
1251             throws CameraAccessException {
1252         Set<String> cameraIds = new HashSet<String>(Arrays.asList(getCameraIdListForTesting(manager, getSystemCameras)));
1253         Set<Set<String>> combinations =  manager.getConcurrentCameraIds();
1254         Set<Set<String>> correctComb = new HashSet<Set<String>>();
1255         for (Set<String> comb : combinations) {
1256             Set<String> filteredIds = new HashSet<String>();
1257             for (String id : comb) {
1258                 if (cameraIds.contains(id)) {
1259                     filteredIds.add(id);
1260                 }
1261             }
1262             if (filteredIds.isEmpty()) {
1263                 continue;
1264             }
1265             correctComb.add(filteredIds);
1266         }
1267         return correctComb;
1268     }
1269 
1270     /**
1271      * Block until the camera is opened.
1272      *
1273      * <p>Don't use this to test #onDisconnected/#onError since this will throw
1274      * an AssertionError if it fails to open the camera device.</p>
1275      *
1276      * @return CameraDevice opened camera device
1277      *
1278      * @throws IllegalArgumentException
1279      *            If the handler is null, or if the handler's looper is current.
1280      * @throws CameraAccessException
1281      *            If open fails immediately.
1282      * @throws BlockingOpenException
1283      *            If open fails after blocking for some amount of time.
1284      * @throws TimeoutRuntimeException
1285      *            If opening times out. Typically unrecoverable.
1286      */
1287     public static CameraDevice openCamera(CameraManager manager, String cameraId,
1288             CameraDevice.StateCallback listener, Handler handler) throws CameraAccessException,
1289             BlockingOpenException {
1290 
1291         /**
1292          * Although camera2 API allows 'null' Handler (it will just use the current
1293          * thread's Looper), this is not what we want for CTS.
1294          *
1295          * In CTS the default looper is used only to process events in between test runs,
1296          * so anything sent there would not be executed inside a test and the test would fail.
1297          *
1298          * In this case, BlockingCameraManager#openCamera performs the check for us.
1299          */
1300         return (new BlockingCameraManager(manager)).openCamera(cameraId, listener, handler);
1301     }
1302 
1303 
1304     /**
1305      * Block until the camera is opened.
1306      *
1307      * <p>Don't use this to test #onDisconnected/#onError since this will throw
1308      * an AssertionError if it fails to open the camera device.</p>
1309      *
1310      * @throws IllegalArgumentException
1311      *            If the handler is null, or if the handler's looper is current.
1312      * @throws CameraAccessException
1313      *            If open fails immediately.
1314      * @throws BlockingOpenException
1315      *            If open fails after blocking for some amount of time.
1316      * @throws TimeoutRuntimeException
1317      *            If opening times out. Typically unrecoverable.
1318      */
1319     public static CameraDevice openCamera(CameraManager manager, String cameraId, Handler handler)
1320             throws CameraAccessException,
1321             BlockingOpenException {
1322         return openCamera(manager, cameraId, /*listener*/null, handler);
1323     }
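    // Example (minimal sketch; "mCameraManager", "mCameraId" and "mHandler" are hypothetical
    // test fixture fields, and the device is always closed when the test is done):
    //
    //   CameraDevice camera = openCamera(mCameraManager, mCameraId, mHandler);
    //   try {
    //       // ... exercise the camera device ...
    //   } finally {
    //       camera.close();
    //   }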
1324 
1325     /**
1326      * Configure a new camera session with output surfaces and type.
1327      *
1328      * @param camera The CameraDevice to be configured.
1329      * @param outputSurfaces The surface list that is used for camera output.
1330      * @param listener The callback to be notified of capture session state changes.
1331      */
1332     public static CameraCaptureSession configureCameraSession(CameraDevice camera,
1333             List<Surface> outputSurfaces, boolean isHighSpeed,
1334             CameraCaptureSession.StateCallback listener, Handler handler)
1335             throws CameraAccessException {
1336         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1337         if (isHighSpeed) {
1338             camera.createConstrainedHighSpeedCaptureSession(outputSurfaces,
1339                     sessionListener, handler);
1340         } else {
1341             camera.createCaptureSession(outputSurfaces, sessionListener, handler);
1342         }
1343         CameraCaptureSession session =
1344                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1345         assertFalse("Camera session should not be a reprocessable session",
1346                 session.isReprocessable());
1347         String sessionType = isHighSpeed ? "High Speed" : "Normal";
1348         assertTrue("Capture session type must be " + sessionType,
1349                 isHighSpeed ==
1350                 CameraConstrainedHighSpeedCaptureSession.class.isAssignableFrom(session.getClass()));
1351 
1352         return session;
1353     }
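    // Example (minimal sketch; "camera", "outputSurfaces", "sessionCallback", "previewRequest",
    // "resultListener" and "mHandler" are hypothetical objects a test would already hold; a
    // mocked StateCallback is commonly used as the session callback):
    //
    //   CameraCaptureSession session = configureCameraSession(camera, outputSurfaces,
    //           /*isHighSpeed*/false, sessionCallback, mHandler);
    //   session.capture(previewRequest, resultListener, mHandler);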
1354 
1355     /**
1356      * Build a new constrained camera session with output surfaces, type and recording session
1357      * parameters.
1358      *
1359      * @param camera The CameraDevice to be configured.
1360      * @param outputSurfaces The surface list that is used for camera output.
1361      * @param listener The callback to be notified of capture session state changes.
1362      * @param initialRequest Initial request settings to use as session parameters.
1363      */
1364     public static CameraCaptureSession buildConstrainedCameraSession(CameraDevice camera,
1365             List<Surface> outputSurfaces, CameraCaptureSession.StateCallback listener,
1366             Handler handler, CaptureRequest initialRequest) throws CameraAccessException {
1367         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1368 
1369         List<OutputConfiguration> outConfigurations = new ArrayList<>(outputSurfaces.size());
1370         for (Surface surface : outputSurfaces) {
1371             outConfigurations.add(new OutputConfiguration(surface));
1372         }
1373         SessionConfiguration sessionConfig = new SessionConfiguration(
1374                 SessionConfiguration.SESSION_HIGH_SPEED, outConfigurations,
1375                 new HandlerExecutor(handler), sessionListener);
1376         sessionConfig.setSessionParameters(initialRequest);
1377         camera.createCaptureSession(sessionConfig);
1378 
1379         CameraCaptureSession session =
1380                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1381         assertFalse("Camera session should not be a reprocessable session",
1382                 session.isReprocessable());
1383         assertTrue("Capture session type must be High Speed",
1384                 CameraConstrainedHighSpeedCaptureSession.class.isAssignableFrom(
1385                         session.getClass()));
1386 
1387         return session;
1388     }
1389 
1390     /**
1391      * Configure a new camera session with output configurations.
1392      *
1393      * @param camera The CameraDevice to be configured.
1394      * @param outputs The OutputConfiguration list that is used for camera output.
1395      * @param listener The callback to be notified of capture session state changes.
1396      */
1397     public static CameraCaptureSession configureCameraSessionWithConfig(CameraDevice camera,
1398             List<OutputConfiguration> outputs,
1399             CameraCaptureSession.StateCallback listener, Handler handler)
1400             throws CameraAccessException {
1401         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1402         camera.createCaptureSessionByOutputConfigurations(outputs, sessionListener, handler);
1403         CameraCaptureSession session =
1404                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1405         assertFalse("Camera session should not be a reprocessable session",
1406                 session.isReprocessable());
1407         return session;
1408     }
1409 
1410     /**
1411      * Try to configure a new camera session with output configurations.
1412      *
1413      * @param camera The CameraDevice to be configured.
1414      * @param outputs The OutputConfiguration list that is used for camera output.
1415      * @param initialRequest The session parameters passed in during stream configuration.
1416      * @param listener The callback to be notified of capture session state changes.
1417      */
1418     public static CameraCaptureSession tryConfigureCameraSessionWithConfig(CameraDevice camera,
1419             List<OutputConfiguration> outputs, CaptureRequest initialRequest,
1420             CameraCaptureSession.StateCallback listener, Handler handler)
1421             throws CameraAccessException {
1422         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1423         SessionConfiguration sessionConfig = new SessionConfiguration(
1424                 SessionConfiguration.SESSION_REGULAR, outputs, new HandlerExecutor(handler),
1425                 sessionListener);
1426         sessionConfig.setSessionParameters(initialRequest);
1427         camera.createCaptureSession(sessionConfig);
1428 
1429         Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
1430                                    BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
1431         int state = sessionListener.getStateWaiter().waitForAnyOfStates(
1432                 Arrays.asList(sessionStates), SESSION_CONFIGURE_TIMEOUT_MS);
1433 
1434         CameraCaptureSession session = null;
1435         if (state == BlockingSessionCallback.SESSION_READY) {
1436             session = sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1437             assertFalse("Camera session should not be a reprocessable session",
1438                     session.isReprocessable());
1439         }
1440         return session;
1441     }
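    // Example (minimal sketch): unlike the helpers above, a configuration failure is reported by
    // returning null instead of failing the test, so callers can skip unsupported combinations.
    // "camera", "outputConfigs", "requestBuilder", "sessionCallback" and "mHandler" are
    // hypothetical objects a test would already hold.
    //
    //   CameraCaptureSession session = tryConfigureCameraSessionWithConfig(camera, outputConfigs,
    //           requestBuilder.build(), sessionCallback, mHandler);
    //   if (session == null) {
    //       return; // Stream combination not supported by this device; skip.
    //   }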
1442 
1443     /**
1444      * Configure a new camera session with output surfaces and initial session parameters.
1445      *
1446      * @param camera The CameraDevice to be configured.
1447      * @param outputSurfaces The surface list that is used for camera output.
1448      * @param listener The callback CameraDevice will notify when the session is available.
1449      * @param handler The handler used to notify callbacks.
1450      * @param initialRequest Initial request settings to use as session parameters.
1451      */
1452     public static CameraCaptureSession configureCameraSessionWithParameters(CameraDevice camera,
1453             List<Surface> outputSurfaces, BlockingSessionCallback listener,
1454             Handler handler, CaptureRequest initialRequest) throws CameraAccessException {
1455         List<OutputConfiguration> outConfigurations = new ArrayList<>(outputSurfaces.size());
1456         for (Surface surface : outputSurfaces) {
1457             outConfigurations.add(new OutputConfiguration(surface));
1458         }
1459         SessionConfiguration sessionConfig = new SessionConfiguration(
1460                 SessionConfiguration.SESSION_REGULAR, outConfigurations,
1461                 new HandlerExecutor(handler), listener);
1462         sessionConfig.setSessionParameters(initialRequest);
1463         camera.createCaptureSession(sessionConfig);
1464 
1465         CameraCaptureSession session = listener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1466         assertFalse("Camera session should not be a reprocessable session",
1467                 session.isReprocessable());
1468         assertFalse("Capture session type must be regular",
1469                 CameraConstrainedHighSpeedCaptureSession.class.isAssignableFrom(
1470                         session.getClass()));
1471 
1472         return session;
1473     }
1474 
1475     /**
1476      * Configure a new camera session with output surfaces.
1477      *
1478      * @param camera The CameraDevice to be configured.
1479      * @param outputSurfaces The surface list that is used for camera output.
1480      * @param listener The callback to be notified of capture session state changes.
1481      */
1482     public static CameraCaptureSession configureCameraSession(CameraDevice camera,
1483             List<Surface> outputSurfaces,
1484             CameraCaptureSession.StateCallback listener, Handler handler)
1485             throws CameraAccessException {
1486 
1487         return configureCameraSession(camera, outputSurfaces, /*isHighSpeed*/false,
1488                 listener, handler);
1489     }
1490 
1491     public static CameraCaptureSession configureReprocessableCameraSession(CameraDevice camera,
1492             InputConfiguration inputConfiguration, List<Surface> outputSurfaces,
1493             CameraCaptureSession.StateCallback listener, Handler handler)
1494             throws CameraAccessException {
1495         List<OutputConfiguration> outputConfigs = new ArrayList<OutputConfiguration>();
1496         for (Surface surface : outputSurfaces) {
1497             outputConfigs.add(new OutputConfiguration(surface));
1498         }
1499         CameraCaptureSession session = configureReprocessableCameraSessionWithConfigurations(
1500                 camera, inputConfiguration, outputConfigs, listener, handler);
1501 
1502         return session;
1503     }
1504 
1505     public static CameraCaptureSession configureReprocessableCameraSessionWithConfigurations(
1506             CameraDevice camera, InputConfiguration inputConfiguration,
1507             List<OutputConfiguration> outputConfigs, CameraCaptureSession.StateCallback listener,
1508             Handler handler) throws CameraAccessException {
1509         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1510         SessionConfiguration sessionConfig = new SessionConfiguration(
1511                 SessionConfiguration.SESSION_REGULAR, outputConfigs, new HandlerExecutor(handler),
1512                 sessionListener);
1513         sessionConfig.setInputConfiguration(inputConfiguration);
1514         camera.createCaptureSession(sessionConfig);
1515 
1516         Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
1517                                    BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
1518         int state = sessionListener.getStateWaiter().waitForAnyOfStates(
1519                 Arrays.asList(sessionStates), SESSION_CONFIGURE_TIMEOUT_MS);
1520 
1521         assertTrue("Creating a reprocessable session failed.",
1522                 state == BlockingSessionCallback.SESSION_READY);
1523         CameraCaptureSession session =
1524                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1525         assertTrue("Camera session should be a reprocessable session", session.isReprocessable());
1526 
1527         return session;
1528     }
1529 
1530     /**
1531      * Create a reprocessable camera session with input and output configurations.
1532      *
1533      * @param camera The CameraDevice to be configured.
1534      * @param inputConfiguration The input configuration used to create this session.
1535      * @param outputs The output configurations used to create this session.
1536      * @param listener The callback to be notified of capture session state changes.
1537      * @param handler The handler used to notify callbacks.
1538      * @return The session ready to use.
1539      * @throws CameraAccessException
1540      */
1541     public static CameraCaptureSession configureReprocCameraSessionWithConfig(CameraDevice camera,
1542             InputConfiguration inputConfiguration, List<OutputConfiguration> outputs,
1543             CameraCaptureSession.StateCallback listener, Handler handler)
1544             throws CameraAccessException {
1545         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1546         camera.createReprocessableCaptureSessionByConfigurations(inputConfiguration, outputs,
1547                 sessionListener, handler);
1548 
1549         Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
1550                                    BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
1551         int state = sessionListener.getStateWaiter().waitForAnyOfStates(
1552                 Arrays.asList(sessionStates), SESSION_CONFIGURE_TIMEOUT_MS);
1553 
1554         assertTrue("Creating a reprocessable session failed.",
1555                 state == BlockingSessionCallback.SESSION_READY);
1556 
1557         CameraCaptureSession session =
1558                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1559         assertTrue("Camera session should be a reprocessable session", session.isReprocessable());
1560 
1561         return session;
1562     }
1563 
1564     public static <T> void assertArrayNotEmpty(T arr, String message) {
1565         assertTrue(message, arr != null && Array.getLength(arr) > 0);
1566     }
1567 
1568     /**
1569      * Check if the format is a legal YUV format camera supported.
1570      */
1571     public static void checkYuvFormat(int format) {
1572         if ((format != ImageFormat.YUV_420_888) &&
1573                 (format != ImageFormat.NV21) &&
1574                 (format != ImageFormat.YV12)) {
1575             fail("Wrong formats: " + format);
1576         }
1577     }
1578 
1579     /**
1580      * Check if image size and format match given size and format.
1581      */
1582     public static void checkImage(Image image, int width, int height, int format) {
1583         // Image reader will wrap YV12/NV21 image by YUV_420_888
1584         if (format == ImageFormat.NV21 || format == ImageFormat.YV12) {
1585             format = ImageFormat.YUV_420_888;
1586         }
1587         assertNotNull("Input image is invalid", image);
1588         assertEquals("Format doesn't match", format, image.getFormat());
1589         assertEquals("Width doesn't match", width, image.getWidth());
1590         assertEquals("Height doesn't match", height, image.getHeight());
1591     }
1592 
1593     /**
1594      * <p>Read data from all planes of an Image into a contiguous unpadded, unpacked
1595      * 1-D linear byte array, such that it can be written to disk or accessed by
1596      * software conveniently. It supports YUV_420_888/NV21/YV12 and JPEG input
1597      * Image formats.</p>
1598      *
1599      * <p>For YUV_420_888/NV21/YV12/Y8/Y16, it returns a byte array that contains
1600      * the Y plane data first, followed by the U(Cb) and V(Cr) planes, if any
1601      * (xstride = width, ystride = height for the chroma and luma components).</p>
1602      *
1603      * <p>For JPEG, it returns a 1-D byte array that contains a complete JPEG image.</p>
1604      *
1605      * <p>For YUV P010, it returns a byte array that contains Y plane first, followed
1606      * by the interleaved U(Cb)/V(Cr) plane.</p>
1607      */
1608     public static byte[] getDataFromImage(Image image) {
1609         assertNotNull("Invalid image:", image);
1610         int format = image.getFormat();
1611         int width = image.getWidth();
1612         int height = image.getHeight();
1613         int rowStride, pixelStride;
1614         byte[] data = null;
1615 
1616         // Read image data
1617         Plane[] planes = image.getPlanes();
1618         assertTrue("Fail to get image planes", planes != null && planes.length > 0);
1619 
1620         // Check image validity
1621         checkAndroidImageFormat(image);
1622 
1623         ByteBuffer buffer = null;
1624         // JPEG doesn't have pixelstride and rowstride, treat it as 1D buffer.
1625         // Same goes for DEPTH_POINT_CLOUD, RAW_PRIVATE, DEPTH_JPEG, and HEIC
1626         if (format == ImageFormat.JPEG || format == ImageFormat.DEPTH_POINT_CLOUD ||
1627                 format == ImageFormat.RAW_PRIVATE || format == ImageFormat.DEPTH_JPEG ||
1628                 format == ImageFormat.HEIC) {
1629             buffer = planes[0].getBuffer();
1630             assertNotNull("Fail to get jpeg/depth/heic ByteBuffer", buffer);
1631             data = new byte[buffer.remaining()];
1632             buffer.get(data);
1633             buffer.rewind();
1634             return data;
1635         } else if (format == ImageFormat.YCBCR_P010) {
1636             // P010 samples are stored within 16 bit values
1637             int offset = 0;
1638             int bytesPerPixelRounded = (ImageFormat.getBitsPerPixel(format) + 7) / 8;
1639             data = new byte[width * height * bytesPerPixelRounded];
1640             assertTrue("Unexpected number of planes, expected " + 3 + " actual " + planes.length,
1641                     planes.length == 3);
1642             for (int i = 0; i < 2; i++) {
1643                 buffer = planes[i].getBuffer();
1644                 assertNotNull("Fail to get bytebuffer from plane", buffer);
1645                 buffer.rewind();
1646                 rowStride = planes[i].getRowStride();
1647                 if (VERBOSE) {
1648                     Log.v(TAG, "rowStride " + rowStride);
1649                     Log.v(TAG, "width " + width);
1650                     Log.v(TAG, "height " + height);
1651                 }
1652                 int h = (i == 0) ? height : height / 2;
1653                 for (int row = 0; row < h; row++) {
1654                     // Each 10-bit pixel occupies 2 bytes
1655                     int length = 2 * width;
1656                     buffer.get(data, offset, length);
1657                     offset += length;
1658                     if (row < h - 1) {
1659                         buffer.position(buffer.position() + rowStride - length);
1660                     }
1661                 }
1662                 if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i);
1663                 buffer.rewind();
1664             }
1665             return data;
1666         }
1667 
1668         int offset = 0;
1669         data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
1670         int maxRowSize = planes[0].getRowStride();
1671         for (int i = 0; i < planes.length; i++) {
1672             if (maxRowSize < planes[i].getRowStride()) {
1673                 maxRowSize = planes[i].getRowStride();
1674             }
1675         }
1676         byte[] rowData = new byte[maxRowSize];
1677         if(VERBOSE) Log.v(TAG, "get data from " + planes.length + " planes");
1678         for (int i = 0; i < planes.length; i++) {
1679             buffer = planes[i].getBuffer();
1680             assertNotNull("Fail to get bytebuffer from plane", buffer);
1681             buffer.rewind();
1682             rowStride = planes[i].getRowStride();
1683             pixelStride = planes[i].getPixelStride();
1684             assertTrue("pixel stride " + pixelStride + " is invalid", pixelStride > 0);
1685             if (VERBOSE) {
1686                 Log.v(TAG, "pixelStride " + pixelStride);
1687                 Log.v(TAG, "rowStride " + rowStride);
1688                 Log.v(TAG, "width " + width);
1689                 Log.v(TAG, "height " + height);
1690             }
1691             // For multi-planar yuv images, assuming yuv420 with 2x2 chroma subsampling.
1692             int w = (i == 0) ? width : width / 2;
1693             int h = (i == 0) ? height : height / 2;
1694             assertTrue("rowStride " + rowStride + " should be >= width " + w , rowStride >= w);
1695             for (int row = 0; row < h; row++) {
1696                 int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
1697                 int length;
1698                 if (pixelStride == bytesPerPixel) {
1699                     // Special case: optimized read of the entire row
1700                     length = w * bytesPerPixel;
1701                     buffer.get(data, offset, length);
1702                     offset += length;
1703                 } else {
1704                     // Generic case: should work for any pixelStride but slower.
1705                     // Use intermediate buffer to avoid read byte-by-byte from
1706                     // DirectByteBuffer, which is very bad for performance
1707                     length = (w - 1) * pixelStride + bytesPerPixel;
1708                     buffer.get(rowData, 0, length);
1709                     for (int col = 0; col < w; col++) {
1710                         data[offset++] = rowData[col * pixelStride];
1711                     }
1712                 }
1713                 // Advance buffer the remainder of the row stride
1714                 if (row < h - 1) {
1715                     buffer.position(buffer.position() + rowStride - length);
1716                 }
1717             }
1718             if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i);
1719             buffer.rewind();
1720         }
1721         return data;
1722     }
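    // Example (minimal sketch; "reader" is a hypothetical ImageReader created with the
    // YUV_420_888 format, "width"/"height" match its configured size, and "fileName" is a
    // hypothetical debug output path):
    //
    //   Image image = reader.acquireNextImage();
    //   try {
    //       checkImage(image, width, height, ImageFormat.YUV_420_888);
    //       byte[] yuvData = getDataFromImage(image);
    //       dumpFile(fileName, yuvData);
    //   } finally {
    //       image.close();
    //   }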
1723 
1724     /**
1725      * <p>Check the Android image format validity for an image; only the formats below are supported:</p>
1726      *
1727      * <p>YUV_420_888/NV21/YV12; more can be added in the future.</p>
1728      */
1729     public static void checkAndroidImageFormat(Image image) {
1730         int format = image.getFormat();
1731         Plane[] planes = image.getPlanes();
1732         switch (format) {
1733             case ImageFormat.YUV_420_888:
1734             case ImageFormat.NV21:
1735             case ImageFormat.YV12:
1736             case ImageFormat.YCBCR_P010:
1737                 assertEquals("YUV420 format Images should have 3 planes", 3, planes.length);
1738                 break;
1739             case ImageFormat.JPEG:
1740             case ImageFormat.RAW_SENSOR:
1741             case ImageFormat.RAW_PRIVATE:
1742             case ImageFormat.DEPTH16:
1743             case ImageFormat.DEPTH_POINT_CLOUD:
1744             case ImageFormat.DEPTH_JPEG:
1745             case ImageFormat.Y8:
1746             case ImageFormat.HEIC:
1747                 assertEquals("JPEG/RAW/depth/Y8 Images should have one plane", 1, planes.length);
1748                 break;
1749             default:
1750                 fail("Unsupported Image Format: " + format);
1751         }
1752     }
1753 
1754     public static void dumpFile(String fileName, Bitmap data) {
1755         FileOutputStream outStream;
1756         try {
1757             Log.v(TAG, "output will be saved as " + fileName);
1758             outStream = new FileOutputStream(fileName);
1759         } catch (IOException ioe) {
1760             throw new RuntimeException("Unable to create debug output file " + fileName, ioe);
1761         }
1762 
1763         try {
1764             data.compress(Bitmap.CompressFormat.JPEG, /*quality*/90, outStream);
1765             outStream.close();
1766         } catch (IOException ioe) {
1767             throw new RuntimeException("failed writing data to file " + fileName, ioe);
1768         }
1769     }
1770 
1771     public static void dumpFile(String fileName, byte[] data) {
1772         FileOutputStream outStream;
1773         try {
1774             Log.v(TAG, "output will be saved as " + fileName);
1775             outStream = new FileOutputStream(fileName);
1776         } catch (IOException ioe) {
1777             throw new RuntimeException("Unable to create debug output file " + fileName, ioe);
1778         }
1779 
1780         try {
1781             outStream.write(data);
1782             outStream.close();
1783         } catch (IOException ioe) {
1784             throw new RuntimeException("failed writing data to file " + fileName, ioe);
1785         }
1786     }
1787 
1788     /**
1789      * Get the available output sizes for the user-defined {@code format}.
1790      *
1791      * <p>Note that implementation-defined/hidden formats are not supported.</p>
1792      */
1793     public static Size[] getSupportedSizeForFormat(int format, String cameraId,
1794             CameraManager cameraManager) throws CameraAccessException {
1795         return getSupportedSizeForFormat(format, cameraId, cameraManager,
1796                 /*maxResolution*/false);
1797     }
1798 
1799     public static Size[] getSupportedSizeForFormat(int format, String cameraId,
1800             CameraManager cameraManager, boolean maxResolution) throws CameraAccessException {
1801         CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId);
1802         assertNotNull("Can't get camera characteristics!", properties);
1803         if (VERBOSE) {
1804             Log.v(TAG, "get camera characteristics for camera: " + cameraId);
1805         }
1806         CameraCharacteristics.Key<StreamConfigurationMap> configMapTag = maxResolution ?
1807                 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION :
1808                 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP;
1809         StreamConfigurationMap configMap = properties.get(configMapTag);
1810         if (configMap == null) {
1811             assertTrue("SCALER_STREAM_CONFIGURATION_MAP is null!", maxResolution);
1812             return null;
1813         }
1814 
1815         Size[] availableSizes = configMap.getOutputSizes(format);
1816         if (!maxResolution) {
1817             assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for format: "
1818                     + format);
1819         }
1820         Size[] highResAvailableSizes = configMap.getHighResolutionOutputSizes(format);
1821         if (highResAvailableSizes != null && highResAvailableSizes.length > 0) {
1822             Size[] allSizes = new Size[availableSizes.length + highResAvailableSizes.length];
1823             System.arraycopy(availableSizes, 0, allSizes, 0,
1824                     availableSizes.length);
1825             System.arraycopy(highResAvailableSizes, 0, allSizes, availableSizes.length,
1826                     highResAvailableSizes.length);
1827             availableSizes = allSizes;
1828         }
1829         if (VERBOSE) Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes));
1830         return availableSizes;
1831     }
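    // Example (minimal sketch; "mCameraId" and "mCameraManager" are hypothetical fixture
    // fields): query the JPEG output sizes and pick the largest one by area.
    //
    //   Size[] jpegSizes = getSupportedSizeForFormat(ImageFormat.JPEG, mCameraId, mCameraManager);
    //   Size maxJpegSize = getMaxSize(jpegSizes);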
1832 
1833     /**
1834      * Get the available output sizes for the given class.
1835      *
1836      */
1837     public static Size[] getSupportedSizeForClass(Class klass, String cameraId,
1838             CameraManager cameraManager) throws CameraAccessException {
1839         CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId);
1840         assertNotNull("Can't get camera characteristics!", properties);
1841         if (VERBOSE) {
1842             Log.v(TAG, "get camera characteristics for camera: " + cameraId);
1843         }
1844         StreamConfigurationMap configMap =
1845                 properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
1846         Size[] availableSizes = configMap.getOutputSizes(klass);
1847         assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for class: "
1848                 + klass);
1849         Size[] highResAvailableSizes = configMap.getHighResolutionOutputSizes(ImageFormat.PRIVATE);
1850         if (highResAvailableSizes != null && highResAvailableSizes.length > 0) {
1851             Size[] allSizes = new Size[availableSizes.length + highResAvailableSizes.length];
1852             System.arraycopy(availableSizes, 0, allSizes, 0,
1853                     availableSizes.length);
1854             System.arraycopy(highResAvailableSizes, 0, allSizes, availableSizes.length,
1855                     highResAvailableSizes.length);
1856             availableSizes = allSizes;
1857         }
1858         if (VERBOSE) Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes));
1859         return availableSizes;
1860     }
1861 
1862     /**
1863      * Size comparator that compares sizes by the number of pixels they cover.
1864      *
1865      * <p>If the areas of two sizes are the same, compare the widths.</p>
1866      */
1867     public static class SizeComparator implements Comparator<Size> {
1868         @Override
1869         public int compare(Size lhs, Size rhs) {
1870             return CameraUtils
1871                     .compareSizes(lhs.getWidth(), lhs.getHeight(), rhs.getWidth(), rhs.getHeight());
1872         }
1873     }
1874 
1875     /**
1876      * Get sorted size list in descending order. Remove the sizes larger than
1877      * the bound. If the bound is null, don't do the size bound filtering.
1878      */
1879     static public List<Size> getSupportedPreviewSizes(String cameraId,
1880             CameraManager cameraManager, Size bound) throws CameraAccessException {
1881 
1882         Size[] rawSizes = getSupportedSizeForClass(android.view.SurfaceHolder.class, cameraId,
1883                 cameraManager);
1884         assertArrayNotEmpty(rawSizes,
1885                 "Available sizes for SurfaceHolder class should not be empty");
1886         if (VERBOSE) {
1887             Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(rawSizes));
1888         }
1889 
1890         if (bound == null) {
1891             return getAscendingOrderSizes(Arrays.asList(rawSizes), /*ascending*/false);
1892         }
1893 
1894         List<Size> sizes = new ArrayList<Size>();
1895         for (Size sz: rawSizes) {
1896             if (sz.getWidth() <= bound.getWidth() && sz.getHeight() <= bound.getHeight()) {
1897                 sizes.add(sz);
1898             }
1899         }
1900         return getAscendingOrderSizes(sizes, /*ascending*/false);
1901     }
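    // Example (minimal sketch; "mCameraId" and "mCameraManager" are hypothetical fixture
    // fields): limit preview sizes to at most 1080p; the returned list is in descending order,
    // so element 0 is the largest size within the bound.
    //
    //   List<Size> previewSizes =
    //           getSupportedPreviewSizes(mCameraId, mCameraManager, new Size(1920, 1080));
    //   Size largestBoundedPreview = previewSizes.get(0);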
1902 
1903     /**
1904      * Get a sorted list of sizes from a given size list.
1905      *
1906      * <p>
1907      * Sizes are compared by the area they cover; if the areas are the same,
1908      * the widths are compared.
1909      * </p>
1910      *
1911      * @param sizeList The input size list to be sorted
1912      * @param ascending True if the order is ascending, otherwise descending order
1913      * @return The ordered list of sizes
1914      */
1915     static public List<Size> getAscendingOrderSizes(final List<Size> sizeList, boolean ascending) {
1916         if (sizeList == null) {
1917             throw new IllegalArgumentException("sizeList shouldn't be null");
1918         }
1919 
1920         Comparator<Size> comparator = new SizeComparator();
1921         List<Size> sortedSizes = new ArrayList<Size>();
1922         sortedSizes.addAll(sizeList);
1923         Collections.sort(sortedSizes, comparator);
1924         if (!ascending) {
1925             Collections.reverse(sortedSizes);
1926         }
1927 
1928         return sortedSizes;
1929     }
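    // Example (minimal sketch): sorting a fixed list of sizes in ascending area order.
    //
    //   List<Size> sorted = getAscendingOrderSizes(Arrays.asList(
    //           new Size(1920, 1080), new Size(640, 480), new Size(3840, 2160)),
    //           /*ascending*/true);
    //   // sorted is now [640x480, 1920x1080, 3840x2160].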
1930     /**
1931      * Get sorted (descending order) size list for given format. Remove the sizes larger than
1932      * the bound. If the bound is null, don't do the size bound filtering.
1933      */
1934     static public List<Size> getSortedSizesForFormat(String cameraId,
1935             CameraManager cameraManager, int format, Size bound) throws CameraAccessException {
1936         return getSortedSizesForFormat(cameraId, cameraManager, format, /*maxResolution*/false,
1937                 bound);
1938     }
1939 
1940     /**
1941      * Get sorted (descending order) size list for given format (with an option to get sizes from
1942      * the maximum resolution stream configuration map). Remove the sizes larger than
1943      * the bound. If the bound is null, don't do the size bound filtering.
1944      */
1945     static public List<Size> getSortedSizesForFormat(String cameraId,
1946             CameraManager cameraManager, int format, boolean maxResolution, Size bound)
1947             throws CameraAccessException {
1948         Comparator<Size> comparator = new SizeComparator();
1949         Size[] sizes = getSupportedSizeForFormat(format, cameraId, cameraManager, maxResolution);
1950         List<Size> sortedSizes = null;
1951         if (bound != null) {
1952             sortedSizes = new ArrayList<Size>(/*capacity*/1);
1953             for (Size sz : sizes) {
1954                 if (comparator.compare(sz, bound) <= 0) {
1955                     sortedSizes.add(sz);
1956                 }
1957             }
1958         } else {
1959             sortedSizes = Arrays.asList(sizes);
1960         }
1961         assertTrue("Supported size list should have at least one element",
1962                 sortedSizes.size() > 0);
1963 
1964         Collections.sort(sortedSizes, comparator);
1965         // Make it in descending order.
1966         Collections.reverse(sortedSizes);
1967         return sortedSizes;
1968     }
1969 
1970     /**
1971      * Get supported video size list for a given camera device.
1972      *
1973      * <p>
1974      * Filter out the sizes that are larger than the bound. If the bound is
1975      * null, don't do the size bound filtering.
1976      * </p>
1977      */
1978     static public List<Size> getSupportedVideoSizes(String cameraId,
1979             CameraManager cameraManager, Size bound) throws CameraAccessException {
1980 
1981         Size[] rawSizes = getSupportedSizeForClass(android.media.MediaRecorder.class,
1982                 cameraId, cameraManager);
1983         assertArrayNotEmpty(rawSizes,
1984                 "Available sizes for MediaRecorder class should not be empty");
1985         if (VERBOSE) {
1986             Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(rawSizes));
1987         }
1988 
1989         if (bound == null) {
1990             return getAscendingOrderSizes(Arrays.asList(rawSizes), /*ascending*/false);
1991         }
1992 
1993         List<Size> sizes = new ArrayList<Size>();
1994         for (Size sz: rawSizes) {
1995             if (sz.getWidth() <= bound.getWidth() && sz.getHeight() <= bound.getHeight()) {
1996                 sizes.add(sz);
1997             }
1998         }
1999         return getAscendingOrderSizes(sizes, /*ascending*/false);
2000     }
2001 
2002     /**
2003      * Get supported still capture (JPEG) size list (descending order) for a given camera device.
2004      *
2005      * <p>
2006      * Filter out the sizes that are larger than the bound. If the bound is
2007      * null, don't do the size bound filtering.
2008      * </p>
2009      */
2010     static public List<Size> getSupportedStillSizes(String cameraId,
2011             CameraManager cameraManager, Size bound) throws CameraAccessException {
2012         return getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.JPEG, bound);
2013     }
2014 
2015     static public List<Size> getSupportedHeicSizes(String cameraId,
2016             CameraManager cameraManager, Size bound) throws CameraAccessException {
2017         return getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.HEIC, bound);
2018     }
2019 
2020     static public Size getMinPreviewSize(String cameraId, CameraManager cameraManager)
2021             throws CameraAccessException {
2022         List<Size> sizes = getSupportedPreviewSizes(cameraId, cameraManager, null);
2023         return sizes.get(sizes.size() - 1);
2024     }
2025 
2026     /**
2027      * Get max supported preview size for a camera device.
2028      */
2029     static public Size getMaxPreviewSize(String cameraId, CameraManager cameraManager)
2030             throws CameraAccessException {
2031         return getMaxPreviewSize(cameraId, cameraManager, /*bound*/null);
2032     }
2033 
2034     /**
2035      * Get max preview size for a camera device in the supported sizes that are no larger
2036      * than the bound.
2037      */
2038     static public Size getMaxPreviewSize(String cameraId, CameraManager cameraManager, Size bound)
2039             throws CameraAccessException {
2040         List<Size> sizes = getSupportedPreviewSizes(cameraId, cameraManager, bound);
2041         return sizes.get(0);
2042     }
2043 
2044     /**
2045      * Get max depth size for a camera device.
2046      */
2047     static public Size getMaxDepthSize(String cameraId, CameraManager cameraManager)
2048             throws CameraAccessException {
2049         List<Size> sizes = getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.DEPTH16,
2050                 /*bound*/ null);
2051         return sizes.get(0);
2052     }
2053 
2054     /**
2055      * Return the smaller of the two sizes, by area.
2056      * @param a first size
2057      *
2058      * @param b second size
2059      *
2060      * @return Size the smaller size
2061      *
2062      * @throws IllegalArgumentException if either param was null.
2063      *
2064      */
2065     @NonNull public static Size getMinSize(Size a, Size b) {
2066         if (a == null || b == null) {
2067             throw new IllegalArgumentException("sizes must not be null");
2068         }
2069         if (a.getWidth() * a.getHeight() < b.getHeight() * b.getWidth()) {
2070             return a;
2071         }
2072         return b;
2073     }
2074 
2075     /**
2076      * Get the largest size by area.
2077      *
2078      * @param sizes an array of sizes, must have at least 1 element
2079      *
2080      * @return Largest Size
2081      *
2082      * @throws IllegalArgumentException if sizes was null or had 0 elements
2083      */
2084     public static Size getMaxSize(Size... sizes) {
2085         if (sizes == null || sizes.length == 0) {
2086             throw new IllegalArgumentException("sizes was empty");
2087         }
2088 
2089         Size sz = sizes[0];
2090         for (Size size : sizes) {
2091             if (size.getWidth() * size.getHeight() > sz.getWidth() * sz.getHeight()) {
2092                 sz = size;
2093             }
2094         }
2095 
2096         return sz;
2097     }
2098 
2099     /**
2100      * Get the largest size by area within (less than) bound
2101      *
2102      * @param sizes an array of sizes, must have at least 1 element
2103      *
2104      * @return Largest Size. Null if no such size exists within bound.
2105      *
2106      * @throws IllegalArgumentException if sizes was null or had 0 elements, or bound is invalid.
2107      */
2108     public static Size getMaxSizeWithBound(Size[] sizes, int bound) {
2109         if (sizes == null || sizes.length == 0) {
2110             throw new IllegalArgumentException("sizes was empty");
2111         }
2112         if (bound <= 0) {
2113             throw new IllegalArgumentException("bound is invalid");
2114         }
2115 
2116         Size sz = null;
2117         for (Size size : sizes) {
2118             if (size.getWidth() * size.getHeight() >= bound) {
2119                 continue;
2120             }
2121 
2122             if (sz == null ||
2123                     size.getWidth() * size.getHeight() > sz.getWidth() * sz.getHeight()) {
2124                 sz = size;
2125             }
2126         }
2127 
2128         return sz;
2129     }
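    // Example (minimal sketch; "availableSizes" is a hypothetical array returned by one of the
    // getSupportedSizeFor* helpers): pick the largest size strictly below 2 megapixels.
    //
    //   Size belowTwoMp = getMaxSizeWithBound(availableSizes, 2000000);
    //   if (belowTwoMp == null) {
    //       // No supported size is below the bound; the caller must handle this case.
    //   }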
2130 
2131     /**
2132      * Returns true if the given {@code array} contains the given element.
2133      *
2134      * @param array {@code array} to check for {@code elem}
2135      * @param elem {@code elem} to test for
2136      * @return {@code true} if the given element is contained
2137      */
2138     public static boolean contains(int[] array, int elem) {
2139         if (array == null) return false;
2140         for (int i = 0; i < array.length; i++) {
2141             if (elem == array[i]) return true;
2142         }
2143         return false;
2144     }
2145 
2146     /**
2147      * Get object array from byte array.
2148      *
2149      * @param array Input byte array to be converted
2150      * @return Byte object array converted from input byte array
2151      */
2152     public static Byte[] toObject(byte[] array) {
2153         return convertPrimitiveArrayToObjectArray(array, Byte.class);
2154     }
2155 
2156     /**
2157      * Get object array from int array.
2158      *
2159      * @param array Input int array to be converted
2160      * @return Integer object array converted from input int array
2161      */
2162     public static Integer[] toObject(int[] array) {
2163         return convertPrimitiveArrayToObjectArray(array, Integer.class);
2164     }
2165 
2166     /**
2167      * Get object array from float array.
2168      *
2169      * @param array Input float array to be converted
2170      * @return Float object array converted from input float array
2171      */
2172     public static Float[] toObject(float[] array) {
2173         return convertPrimitiveArrayToObjectArray(array, Float.class);
2174     }
2175 
2176     /**
2177      * Get object array from double array.
2178      *
2179      * @param array Input double array to be converted
2180      * @return Double object array converted from input double array
2181      */
2182     public static Double[] toObject(double[] array) {
2183         return convertPrimitiveArrayToObjectArray(array, Double.class);
2184     }
2185 
2186     /**
2187      * Convert a primitive input array into its object array version (e.g. from int[] to Integer[]).
2188      *
2189      * @param array Input array object
2190      * @param wrapperClass The boxed class it converts to
2191      * @return Boxed version of primitive array
2192      */
2193     private static <T> T[] convertPrimitiveArrayToObjectArray(final Object array,
2194             final Class<T> wrapperClass) {
2195         // getLength does the null check and isArray check already.
2196         int arrayLength = Array.getLength(array);
2197         if (arrayLength == 0) {
2198             throw new IllegalArgumentException("Input array shouldn't be empty");
2199         }
2200 
2201         @SuppressWarnings("unchecked")
2202         final T[] result = (T[]) Array.newInstance(wrapperClass, arrayLength);
2203         for (int i = 0; i < arrayLength; i++) {
2204             Array.set(result, i, Array.get(array, i));
2205         }
2206         return result;
2207     }
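
    /**
     * Usage sketch (illustrative only, not part of the original utility surface): boxing a
     * primitive array with the helpers above so it can be stored in collections or compared
     * element-wise. The sample sensitivity values are arbitrary.
     */
    private static Integer[] exampleBoxPrimitiveArray() {
        int[] rawSensitivities = {100, 200, 400, 800};
        // Each primitive element is boxed into its wrapper type by the reflection-based helper.
        return toObject(rawSensitivities);
    }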
2208 
2209     /**
2210      * Update one 3A region in capture request builder if that region is supported. Do nothing
2211      * if the specified 3A region is not supported by camera device.
2212      * @param requestBuilder The request to be updated
2213      * @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
2214      * @param regions The 3A regions to be set
2215      * @param staticInfo static metadata characteristics
2216      */
2217     public static void update3aRegion(
2218             CaptureRequest.Builder requestBuilder, int algoIdx, MeteringRectangle[] regions,
2219             StaticMetadata staticInfo)
2220     {
2221         int maxRegions;
2222         CaptureRequest.Key<MeteringRectangle[]> key;
2223 
2224         if (regions == null || regions.length == 0 || staticInfo == null) {
2225             throw new IllegalArgumentException("Invalid input 3A region!");
2226         }
2227 
2228         switch (algoIdx) {
2229             case INDEX_ALGORITHM_AE:
2230                 maxRegions = staticInfo.getAeMaxRegionsChecked();
2231                 key = CaptureRequest.CONTROL_AE_REGIONS;
2232                 break;
2233             case INDEX_ALGORITHM_AWB:
2234                 maxRegions = staticInfo.getAwbMaxRegionsChecked();
2235                 key = CaptureRequest.CONTROL_AWB_REGIONS;
2236                 break;
2237             case INDEX_ALGORITHM_AF:
2238                 maxRegions = staticInfo.getAfMaxRegionsChecked();
2239                 key = CaptureRequest.CONTROL_AF_REGIONS;
2240                 break;
2241             default:
2242                 throw new IllegalArgumentException("Unknown 3A Algorithm!");
2243         }
2244 
2245         if (maxRegions >= regions.length) {
2246             requestBuilder.set(key, regions);
2247         }
2248     }
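
    /**
     * Usage sketch (illustrative only): requesting a single centered AF metering region. The
     * request builder, active array rectangle, and region geometry are hypothetical test inputs.
     */
    private static void exampleRequestCenteredAfRegion(CaptureRequest.Builder requestBuilder,
            StaticMetadata staticInfo, Rect activeArray) {
        MeteringRectangle centerRegion = new MeteringRectangle(
                activeArray.width() / 4, activeArray.height() / 4,
                activeArray.width() / 2, activeArray.height() / 2,
                MeteringRectangle.METERING_WEIGHT_MAX);
        // Silently skipped if the device reports zero supported AF regions.
        update3aRegion(requestBuilder, INDEX_ALGORITHM_AF,
                new MeteringRectangle[] {centerRegion}, staticInfo);
    }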
2249 
2250     /**
2251      * Validate that one 3A region in the capture result equals the expected region if that
2252      * region is supported. Do nothing if the region is not supported by the camera device.
2253      * @param result The capture result to be validated
2254      * @param partialResults The partial results to be validated
2255      * @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
2256      * @param expectRegions The 3A regions expected in capture result
2257      * @param scaleByZoomRatio whether to scale the error threshold by zoom ratio
2258      * @param staticInfo static metadata characteristics
2259      */
2260     public static void validate3aRegion(
2261             CaptureResult result, List<CaptureResult> partialResults, int algoIdx,
2262             MeteringRectangle[] expectRegions, boolean scaleByZoomRatio, StaticMetadata staticInfo)
2263     {
2264         // There are multiple cases where the result 3A region could be slightly different from
2265         // the request:
2266         // 1. Distortion correction,
2267         // 2. Adding a smaller 3A region in the test exposes that existing devices' offsets are
2268         //    larger than 1.
2269         // 3. Precision loss due to converting to HAL zoom ratio and back
2270         // 4. Error magnification due to active array scale-up when zoom ratio API is used.
2271         //
2272         // To handle all these scenarios, make the threshold larger, and scale the threshold based
2273         // on zoom ratio. The scaling factor should be relatively tight, and shouldn't be smaller
2274         // than 1x.
2275         final int maxCoordOffset = 5;
2276         int maxRegions;
2277         CaptureResult.Key<MeteringRectangle[]> key;
2278         MeteringRectangle[] actualRegion;
2279 
2280         switch (algoIdx) {
2281             case INDEX_ALGORITHM_AE:
2282                 maxRegions = staticInfo.getAeMaxRegionsChecked();
2283                 key = CaptureResult.CONTROL_AE_REGIONS;
2284                 break;
2285             case INDEX_ALGORITHM_AWB:
2286                 maxRegions = staticInfo.getAwbMaxRegionsChecked();
2287                 key = CaptureResult.CONTROL_AWB_REGIONS;
2288                 break;
2289             case INDEX_ALGORITHM_AF:
2290                 maxRegions = staticInfo.getAfMaxRegionsChecked();
2291                 key = CaptureResult.CONTROL_AF_REGIONS;
2292                 break;
2293             default:
2294                 throw new IllegalArgumentException("Unknown 3A Algorithm!");
2295         }
2296 
2297         int maxDist = maxCoordOffset;
2298         if (scaleByZoomRatio) {
2299             Float zoomRatio = result.get(CaptureResult.CONTROL_ZOOM_RATIO);
2300             for (CaptureResult partialResult : partialResults) {
2301                 Float zoomRatioInPartial = partialResult.get(CaptureResult.CONTROL_ZOOM_RATIO);
2302                 if (zoomRatioInPartial != null) {
2303                     assertEquals("CONTROL_ZOOM_RATIO in partial result must match"
2304                             + " that in final result", zoomRatio, zoomRatioInPartial);
2305                 }
2306             }
2307             maxDist = (int)Math.ceil(maxDist * Math.max(zoomRatio / 2, 1.0f));
2308         }
2309 
2310         if (maxRegions > 0)
2311         {
2312             actualRegion = getValueNotNull(result, key);
2313             for (CaptureResult partialResult : partialResults) {
2314                 MeteringRectangle[] actualRegionInPartial = partialResult.get(key);
2315                 if (actualRegionInPartial != null) {
2316                     assertEquals("Key " + key.getName() + " in partial result must match"
2317                             + " that in final result", actualRegionInPartial, actualRegion);
2318                 }
2319             }
2320 
2321             for (int i = 0; i < actualRegion.length; i++) {
2322                 // If the expected region's metering weight is 0, allow the camera device
2323                 // to override it.
2324                 if (expectRegions[i].getMeteringWeight() == 0) {
2325                     continue;
2326                 }
2327 
2328                 Rect a = actualRegion[i].getRect();
2329                 Rect e = expectRegions[i].getRect();
2330 
2331                 if (VERBOSE) {
2332                     Log.v(TAG, "Actual region " + actualRegion[i].toString() +
2333                             ", expected region " + expectRegions[i].toString() +
2334                             ", maxDist " + maxDist);
2335                 }
2336                 assertTrue(
2337                     "Expected 3A regions: " + Arrays.toString(expectRegions) +
2338                     " are not close enough to the actual one: " + Arrays.toString(actualRegion),
2339                     maxDist >= Math.abs(a.left - e.left));
2340 
2341                 assertTrue(
2342                     "Expected 3A regions: " + Arrays.toString(expectRegions) +
2343                     " are not close enough to the actual one: " + Arrays.toString(actualRegion),
2344                     maxDist >= Math.abs(a.right - e.right));
2345 
2346                 assertTrue(
2347                     "Expected 3A regions: " + Arrays.toString(expectRegions) +
2348                     " are not close enough to the actual one: " + Arrays.toString(actualRegion),
2349                     maxDist >= Math.abs(a.top - e.top));
2350                 assertTrue(
2351                     "Expected 3A regions: " + Arrays.toString(expectRegions) +
2352                     " are not close enough to the actual one: " + Arrays.toString(actualRegion),
2353                     maxDist >= Math.abs(a.bottom - e.bottom));
2354             }
2355         }
2356     }
2357 
2358 
2359     /**
2360      * Validate image based on format and size.
2361      *
2362      * @param image The image to be validated.
2363      * @param width The image width.
2364      * @param height The image height.
2365      * @param format The image format.
2366      * @param filePath The debug dump file path, or null to skip dumping the image
2367      *            to a file.
2368      * @throws UnsupportedOperationException if calling with an unknown format
2369      */
2370     public static void validateImage(Image image, int width, int height, int format,
2371             String filePath) {
2372         checkImage(image, width, height, format);
2373 
2374         /**
2375          * TODO: validate timestamp:
2376          * 1. capture result timestamp against the image timestamp (need
2377          * consider frame drops)
2378          * 2. timestamps should be monotonically increasing for different requests
2379          */
2380         if(VERBOSE) Log.v(TAG, "validating Image");
2381         byte[] data = getDataFromImage(image);
2382         assertTrue("Invalid image data", data != null && data.length > 0);
2383 
2384         switch (format) {
2385             // Clients must be able to process and handle depth jpeg images like any other
2386             // regular jpeg.
2387             case ImageFormat.DEPTH_JPEG:
2388             case ImageFormat.JPEG:
2389                 validateJpegData(data, width, height, filePath);
2390                 break;
2391             case ImageFormat.YCBCR_P010:
2392                 validateP010Data(data, width, height, format, image.getTimestamp(), filePath);
2393                 break;
2394             case ImageFormat.YUV_420_888:
2395             case ImageFormat.YV12:
2396                 validateYuvData(data, width, height, format, image.getTimestamp(), filePath);
2397                 break;
2398             case ImageFormat.RAW_SENSOR:
2399                 validateRaw16Data(data, width, height, format, image.getTimestamp(), filePath);
2400                 break;
2401             case ImageFormat.DEPTH16:
2402                 validateDepth16Data(data, width, height, format, image.getTimestamp(), filePath);
2403                 break;
2404             case ImageFormat.DEPTH_POINT_CLOUD:
2405                 validateDepthPointCloudData(data, width, height, format, image.getTimestamp(), filePath);
2406                 break;
2407             case ImageFormat.RAW_PRIVATE:
2408                 validateRawPrivateData(data, width, height, image.getTimestamp(), filePath);
2409                 break;
2410             case ImageFormat.Y8:
2411                 validateY8Data(data, width, height, format, image.getTimestamp(), filePath);
2412                 break;
2413             case ImageFormat.HEIC:
2414                 validateHeicData(data, width, height, filePath);
2415                 break;
2416             default:
2417                 throw new UnsupportedOperationException("Unsupported format for validation: "
2418                         + format);
2419         }
2420     }
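
    /**
     * Usage sketch (illustrative only): validating the next frame delivered to an
     * {@link ImageReader}. The reader and debug path are hypothetical; a real test would
     * normally wait on an image-available listener before acquiring.
     */
    private static void exampleValidateNextFrame(ImageReader reader, String debugFilePath) {
        Image image = reader.acquireNextImage();
        assertNotNull("No image is available from the reader", image);
        try {
            validateImage(image, reader.getWidth(), reader.getHeight(),
                    reader.getImageFormat(), debugFilePath);
        } finally {
            // Always release the buffer back to the reader.
            image.close();
        }
    }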
2421 
2422     public static class HandlerExecutor implements Executor {
2423         private final Handler mHandler;
2424 
2425         public HandlerExecutor(Handler handler) {
2426             assertNotNull("handler must be valid", handler);
2427             mHandler = handler;
2428         }
2429 
2430         @Override
2431         public void execute(Runnable runCmd) {
2432             mHandler.post(runCmd);
2433         }
2434     }
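
    /**
     * Usage sketch (illustrative only): adapting a looper-backed {@link Handler} for the
     * Executor-based camera2 APIs (for example {@link SessionConfiguration}). The handler and
     * the logged runnable are hypothetical.
     */
    private static Executor exampleHandlerBackedExecutor(Handler cameraHandler) {
        Executor executor = new HandlerExecutor(cameraHandler);
        // The runnable is posted to, and runs on, the handler's looper thread.
        executor.execute(() -> Log.v(TAG, "Running on the camera handler thread"));
        return executor;
    }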
2435 
2436     /**
2437      * Provide a mock for {@link CameraDevice.StateCallback}.
2438      *
2439      * <p>Only useful because mockito can't mock {@link CameraDevice.StateCallback} which is an
2440      * abstract class.</p>
2441      *
2442      * <p>
2443      * Use this instead of other classes when needing to verify interactions, since
2444      * trying to spy on {@link BlockingStateCallback} (or others) will cause unnecessary extra
2445      * interactions which will cause false test failures.
2446      * </p>
2447      *
2448      */
2449     public static class MockStateCallback extends CameraDevice.StateCallback {
2450 
2451         @Override
2452         public void onOpened(CameraDevice camera) {
2453         }
2454 
2455         @Override
2456         public void onDisconnected(CameraDevice camera) {
2457         }
2458 
2459         @Override
2460         public void onError(CameraDevice camera, int error) {
2461         }
2462 
2463         private MockStateCallback() {}
2464 
2465         /**
2466          * Create a Mockito-ready mocked StateCallback.
2467          */
2468         public static MockStateCallback mock() {
2469             return Mockito.spy(new MockStateCallback());
2470         }
2471     }
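
    /**
     * Usage sketch (illustrative only): verifying device callbacks with Mockito. The callback
     * and camera device instances are hypothetical; a real test would open the device and wait
     * for the callback before verifying.
     */
    private static void exampleVerifyDeviceOpened(MockStateCallback mockCallback,
            CameraDevice camera) {
        // Passes only if onOpened was delivered exactly once for this device.
        Mockito.verify(mockCallback, Mockito.times(1)).onOpened(camera);
    }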
2472 
2473     public static void validateJpegData(byte[] jpegData, int width, int height, String filePath) {
2474         BitmapFactory.Options bmpOptions = new BitmapFactory.Options();
2475         // DecodeBound mode: only parse the frame header to get width/height.
2476         // It doesn't decode the pixels.
2477         bmpOptions.inJustDecodeBounds = true;
2478         BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length, bmpOptions);
2479         assertEquals(width, bmpOptions.outWidth);
2480         assertEquals(height, bmpOptions.outHeight);
2481 
2482         // Pixel decoding mode: decode the whole image. Check that the image data
2483         // is decodable here.
2484         assertNotNull("Decoding jpeg failed",
2485                 BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length));
2486         if (DEBUG && filePath != null) {
2487             String fileName =
2488                     filePath + "/" + width + "x" + height + ".jpeg";
2489             dumpFile(fileName, jpegData);
2490         }
2491     }
2492 
2493     private static void validateYuvData(byte[] yuvData, int width, int height, int format,
2494             long ts, String filePath) {
2495         checkYuvFormat(format);
2496         if (VERBOSE) Log.v(TAG, "Validating YUV data");
2497         int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
2498         assertEquals("Yuv data doesn't match", expectedSize, yuvData.length);
2499 
2500         // TODO: Can add data validation for test pattern.
2501 
2502         if (DEBUG && filePath != null) {
2503             String fileName =
2504                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".yuv";
2505             dumpFile(fileName, yuvData);
2506         }
2507     }
2508 
2509     private static void validateP010Data(byte[] p010Data, int width, int height, int format,
2510             long ts, String filePath) {
2511         if (VERBOSE) Log.v(TAG, "Validating P010 data");
2512         // The P010 10 bit samples are stored in two bytes so the size needs to be adjusted
2513         // accordingly.
2514         int bytesPerPixelRounded = (ImageFormat.getBitsPerPixel(format) + 7) / 8;
2515         int expectedSize = width * height * bytesPerPixelRounded;
2516         assertEquals("P010 data doesn't match", expectedSize, p010Data.length);
2517 
2518         if (DEBUG && filePath != null) {
2519             String fileName =
2520                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".p010";
2521             dumpFile(fileName, p010Data);
2522         }
2523     }
2524     private static void validateRaw16Data(byte[] rawData, int width, int height, int format,
2525             long ts, String filePath) {
2526         if (VERBOSE) Log.v(TAG, "Validating raw data");
2527         int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
2528         assertEquals("Raw data doesn't match", expectedSize, rawData.length);
2529 
2530         // TODO: Can add data validation for test pattern.
2531 
2532         if (DEBUG && filePath != null) {
2533             String fileName =
2534                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".raw16";
2535             dumpFile(fileName, rawData);
2536         }
2537 
2538         return;
2539     }
2540 
2541     private static void validateY8Data(byte[] rawData, int width, int height, int format,
2542             long ts, String filePath) {
2543         if (VERBOSE) Log.v(TAG, "Validating Y8 data");
2544         int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
2545         assertEquals("Y8 data doesn't match", expectedSize, rawData.length);
2546 
2547         // TODO: Can add data validation for test pattern.
2548 
2549         if (DEBUG && filePath != null) {
2550             String fileName =
2551                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".y8";
2552             dumpFile(fileName, rawData);
2553         }
2554 
2555         return;
2556     }
2557 
2558     private static void validateRawPrivateData(byte[] rawData, int width, int height,
2559             long ts, String filePath) {
2560         if (VERBOSE) Log.v(TAG, "Validating private raw data");
2561         // Expect each RAW pixel to occupy at least one byte and no more than 30 bytes
2562         int expectedSizeMin = width * height;
2563         int expectedSizeMax = width * height * 30;
2564 
2565         assertTrue("Opaque RAW size " + rawData.length + " out of normal bound [" +
2566                 expectedSizeMin + "," + expectedSizeMax + "]",
2567                 expectedSizeMin <= rawData.length && rawData.length <= expectedSizeMax);
2568 
2569         if (DEBUG && filePath != null) {
2570             String fileName =
2571                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".rawPriv";
2572             dumpFile(fileName, rawData);
2573         }
2574 
2575         return;
2576     }
2577 
2578     private static void validateDepth16Data(byte[] depthData, int width, int height, int format,
2579             long ts, String filePath) {
2580 
2581         if (VERBOSE) Log.v(TAG, "Validating depth16 data");
2582         int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
2583         assertEquals("Depth data doesn't match", expectedSize, depthData.length);
2584 
2585 
2586         if (DEBUG && filePath != null) {
2587             String fileName =
2588                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".depth16";
2589             dumpFile(fileName, depthData);
2590         }
2591 
2592         return;
2593 
2594     }
2595 
2596     private static void validateDepthPointCloudData(byte[] depthData, int width, int height, int format,
2597             long ts, String filePath) {
2598 
2599         if (VERBOSE) Log.v(TAG, "Validating depth point cloud data");
2600 
2601         // Can't validate size since it is variable
2602 
2603         if (DEBUG && filePath != null) {
2604             String fileName =
2605                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".depth_point_cloud";
2606             dumpFile(fileName, depthData);
2607         }
2608 
2609         return;
2610 
2611     }
2612 
2613     private static void validateHeicData(byte[] heicData, int width, int height, String filePath) {
2614         BitmapFactory.Options bmpOptions = new BitmapFactory.Options();
2615         // DecodeBound mode: only parse the frame header to get width/height.
2616         // It doesn't decode the pixels.
2617         bmpOptions.inJustDecodeBounds = true;
2618         BitmapFactory.decodeByteArray(heicData, 0, heicData.length, bmpOptions);
2619         assertEquals(width, bmpOptions.outWidth);
2620         assertEquals(height, bmpOptions.outHeight);
2621 
2622         // Pixel decoding mode: decode the whole image. Check that the image data
2623         // is decodable here.
2624         assertNotNull("Decoding heic failed",
2625                 BitmapFactory.decodeByteArray(heicData, 0, heicData.length));
2626         if (DEBUG && filePath != null) {
2627             String fileName =
2628                     filePath + "/" + width + "x" + height + ".heic";
2629             dumpFile(fileName, heicData);
2630         }
2631     }
2632 
2633     public static <T> T getValueNotNull(CaptureResult result, CaptureResult.Key<T> key) {
2634         if (result == null) {
2635             throw new IllegalArgumentException("Result must not be null");
2636         }
2637 
2638         T value = result.get(key);
2639         assertNotNull("Value of Key " + key.getName() + " shouldn't be null", value);
2640         return value;
2641     }
2642 
2643     public static <T> T getValueNotNull(CameraCharacteristics characteristics,
2644             CameraCharacteristics.Key<T> key) {
2645         if (characteristics == null) {
2646             throw new IllegalArgumentException("Camera characteristics must not be null");
2647         }
2648 
2649         T value = characteristics.get(key);
2650         assertNotNull("Value of Key " + key.getName() + " shouldn't be null", value);
2651         return value;
2652     }
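
    /**
     * Usage sketch (illustrative only): fetching mandatory metadata with a hard assertion
     * instead of a null check at every call site. The standard camera2 keys below are just
     * examples of keys that are expected to be non-null.
     */
    private static Rect exampleReadMandatoryMetadata(CameraCharacteristics characteristics,
            CaptureResult result) {
        Long timestamp = getValueNotNull(result, CaptureResult.SENSOR_TIMESTAMP);
        Log.v(TAG, "Frame timestamp: " + timestamp);
        return getValueNotNull(characteristics,
                CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    }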
2653 
2654     /**
2655      * Get a crop region for a given zoom factor and center position.
2656      * <p>
2657      * The center position is a normalized position in the range of [0, 1.0], where
2658      * (0, 0) represents the top left corner and (1.0, 1.0) represents the bottom
2659      * right corner. The center position can limit the effective minimal zoom
2660      * factor; for example, if the center position is (0.75, 0.75), the
2661      * effective minimal zoom factor becomes 2.0. If the requested zoom factor
2662      * is smaller than 2.0, a crop region with a 2.0 zoom factor will be returned.
2663      * </p>
2664      * <p>
2665      * The aspect ratio of the crop region is kept the same as the aspect
2666      * ratio of the active array.
2667      * </p>
2668      *
2669      * @param zoomFactor The zoom factor to generate the crop region, it must be
2670      *            >= 1.0
2671      * @param center The normalized zoom center point that is in the range of [0, 1].
2672      * @param maxZoom The max zoom factor supported by this device.
2673      * @param activeArray The active array size of this device.
2674      * @return crop region for the given normalized center and zoom factor.
2675      */
2676     public static Rect getCropRegionForZoom(float zoomFactor, final PointF center,
2677             final float maxZoom, final Rect activeArray) {
2678         if (zoomFactor < 1.0) {
2679             throw new IllegalArgumentException("zoom factor " + zoomFactor + " should be >= 1.0");
2680         }
2681         if (center.x > 1.0 || center.x < 0) {
2682             throw new IllegalArgumentException("center.x " + center.x
2683                     + " should be in range of [0, 1.0]");
2684         }
2685         if (center.y > 1.0 || center.y < 0) {
2686             throw new IllegalArgumentException("center.y " + center.y
2687                     + " should be in range of [0, 1.0]");
2688         }
2689         if (maxZoom < 1.0) {
2690             throw new IllegalArgumentException("max zoom factor " + maxZoom + " should be >= 1.0");
2691         }
2692         if (activeArray == null) {
2693             throw new IllegalArgumentException("activeArray must not be null");
2694         }
2695 
2696         float minCenterLength = Math.min(Math.min(center.x, 1.0f - center.x),
2697                 Math.min(center.y, 1.0f - center.y));
2698         float minEffectiveZoom =  0.5f / minCenterLength;
2699         if (minEffectiveZoom > maxZoom) {
2700             throw new IllegalArgumentException("Requested center " + center.toString() +
2701                     " has minimal zoomable factor " + minEffectiveZoom + ", which exceeds max"
2702                             + " zoom factor " + maxZoom);
2703         }
2704 
2705         if (zoomFactor < minEffectiveZoom) {
2706             Log.w(TAG, "Requested zoomFactor " + zoomFactor + " < minimal zoomable factor "
2707                     + minEffectiveZoom + ". It will be overwritten by " + minEffectiveZoom);
2708             zoomFactor = minEffectiveZoom;
2709         }
2710 
2711         int cropCenterX = (int)(activeArray.width() * center.x);
2712         int cropCenterY = (int)(activeArray.height() * center.y);
2713         int cropWidth = (int) (activeArray.width() / zoomFactor);
2714         int cropHeight = (int) (activeArray.height() / zoomFactor);
2715 
2716         return new Rect(
2717                 /*left*/cropCenterX - cropWidth / 2,
2718                 /*top*/cropCenterY - cropHeight / 2,
2719                 /*right*/ cropCenterX + cropWidth / 2,
2720                 /*bottom*/cropCenterY + cropHeight / 2);
2721     }
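
    /**
     * Worked sketch (illustrative only): for a hypothetical 4000x3000 active array, a 2x zoom
     * centered at (0.5, 0.5) yields a 2000x1500 crop, i.e. Rect(1000, 750, 3000, 2250).
     */
    private static Rect exampleCenteredTwoTimesZoomCrop(float maxZoom) {
        Rect activeArray = new Rect(0, 0, 4000, 3000);
        return getCropRegionForZoom(/*zoomFactor*/2.0f, new PointF(0.5f, 0.5f), maxZoom,
                activeArray);
    }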
2722 
2723     /**
2724      * Get AeAvailableTargetFpsRanges and sort them in descending order by max fps
2725      *
2726      * @param staticInfo camera static metadata
2727      * @return AeAvailableTargetFpsRanges in descending order by max fps
2728      */
2729     public static Range<Integer>[] getDescendingTargetFpsRanges(StaticMetadata staticInfo) {
2730         Range<Integer>[] fpsRanges = staticInfo.getAeAvailableTargetFpsRangesChecked();
2731         Arrays.sort(fpsRanges, new Comparator<Range<Integer>>() {
2732             public int compare(Range<Integer> r1, Range<Integer> r2) {
2733                 return r2.getUpper() - r1.getUpper();
2734             }
2735         });
2736         return fpsRanges;
2737     }
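
    /**
     * Usage sketch (illustrative only): picking the highest-fps AE target range a device
     * advertises, for example to build a high frame rate preview request.
     */
    private static Range<Integer> exampleFastestAeFpsRange(StaticMetadata staticInfo) {
        // Index 0 holds the range with the largest upper bound after the descending sort.
        return getDescendingTargetFpsRanges(staticInfo)[0];
    }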
2738 
2739     /**
2740      * Get AeAvailableTargetFpsRanges with max fps not exceeding 30
2741      *
2742      * @param staticInfo camera static metadata
2743      * @return AeAvailableTargetFpsRanges with max fps not exceeding 30
2744      */
2745     public static List<Range<Integer>> getTargetFpsRangesUpTo30(StaticMetadata staticInfo) {
2746         Range<Integer>[] fpsRanges = staticInfo.getAeAvailableTargetFpsRangesChecked();
2747         ArrayList<Range<Integer>> fpsRangesUpTo30 = new ArrayList<Range<Integer>>();
2748         for (Range<Integer> fpsRange : fpsRanges) {
2749             if (fpsRange.getUpper() <= 30) {
2750                 fpsRangesUpTo30.add(fpsRange);
2751             }
2752         }
2753         return fpsRangesUpTo30;
2754     }
2755 
2756     /**
2757      * Get AeAvailableTargetFpsRanges with max fps greater than 30
2758      *
2759      * @param staticInfo camera static metadata
2760      * @return AeAvailableTargetFpsRanges with max fps greater than 30
2761      */
2762     public static List<Range<Integer>> getTargetFpsRangesGreaterThan30(StaticMetadata staticInfo) {
2763         Range<Integer>[] fpsRanges = staticInfo.getAeAvailableTargetFpsRangesChecked();
2764         ArrayList<Range<Integer>> fpsRangesGreaterThan30 = new ArrayList<Range<Integer>>();
2765         for (Range<Integer> fpsRange : fpsRanges) {
2766             if (fpsRange.getUpper() > 30) {
2767                 fpsRangesGreaterThan30.add(fpsRange);
2768             }
2769         }
2770         return fpsRangesGreaterThan30;
2771     }
2772 
2773     /**
2774      * Calculate output 3A region from the intersection of input 3A region and cropped region.
2775      *
2776      * @param requestRegions The input 3A regions
2777      * @param cropRect The cropped region
2778      * @return expected 3A regions output in capture result
2779      */
2780     public static MeteringRectangle[] getExpectedOutputRegion(
2781             MeteringRectangle[] requestRegions, Rect cropRect){
2782         MeteringRectangle[] resultRegions = new MeteringRectangle[requestRegions.length];
2783         for (int i = 0; i < requestRegions.length; i++) {
2784             Rect requestRect = requestRegions[i].getRect();
2785             Rect resultRect = new Rect();
2786             boolean intersect = resultRect.setIntersect(requestRect, cropRect);
2787             resultRegions[i] = new MeteringRectangle(
2788                     resultRect,
2789                     intersect ? requestRegions[i].getMeteringWeight() : 0);
2790         }
2791         return resultRegions;
2792     }
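
    /**
     * Worked sketch (illustrative only): a requested region that partially overlaps a
     * hypothetical crop rectangle is clipped to the overlap, while a region with no overlap
     * keeps an empty rectangle and has its metering weight forced to zero.
     */
    private static MeteringRectangle[] exampleClipRegionsToCrop() {
        Rect cropRect = new Rect(0, 0, 2000, 1500);
        MeteringRectangle[] requested = {
                new MeteringRectangle(1500, 1000, 1000, 1000, /*meteringWeight*/1000),
                new MeteringRectangle(2500, 1600, 100, 100, /*meteringWeight*/1000),
        };
        // First region is clipped to (1500, 1000, 2000, 1500); the second gets weight 0.
        return getExpectedOutputRegion(requested, cropRect);
    }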
2793 
2794     /**
2795      * Copy source image data to destination image.
2796      *
2797      * @param src The source image to be copied from.
2798      * @param dst The destination image to be copied to.
2799      * @throws IllegalArgumentException If the source and destination images have
2800      *             different format, size, or one of the images is not copyable.
2801      */
2802     public static void imageCopy(Image src, Image dst) {
2803         if (src == null || dst == null) {
2804             throw new IllegalArgumentException("Images should be non-null");
2805         }
2806         if (src.getFormat() != dst.getFormat()) {
2807             throw new IllegalArgumentException("Src and dst images should have the same format");
2808         }
2809         if (src.getFormat() == ImageFormat.PRIVATE ||
2810                 dst.getFormat() == ImageFormat.PRIVATE) {
2811             throw new IllegalArgumentException("PRIVATE format images are not copyable");
2812         }
2813 
2814         Size srcSize = new Size(src.getWidth(), src.getHeight());
2815         Size dstSize = new Size(dst.getWidth(), dst.getHeight());
2816         if (!srcSize.equals(dstSize)) {
2817             throw new IllegalArgumentException("source image size " + srcSize + " is different"
2818                     + " from destination image size " + dstSize);
2819         }
2820 
2821         // TODO: check the owner of the dst image, it must be from ImageWriter, other source may
2822         // not be writable. Maybe we should add an isWritable() method in image class.
2823 
2824         Plane[] srcPlanes = src.getPlanes();
2825         Plane[] dstPlanes = dst.getPlanes();
2826         ByteBuffer srcBuffer = null;
2827         ByteBuffer dstBuffer = null;
2828         for (int i = 0; i < srcPlanes.length; i++) {
2829             srcBuffer = srcPlanes[i].getBuffer();
2830             dstBuffer = dstPlanes[i].getBuffer();
2831             int srcPos = srcBuffer.position();
2832             srcBuffer.rewind();
2833             dstBuffer.rewind();
2834             int srcRowStride = srcPlanes[i].getRowStride();
2835             int dstRowStride = dstPlanes[i].getRowStride();
2836             int srcPixStride = srcPlanes[i].getPixelStride();
2837             int dstPixStride = dstPlanes[i].getPixelStride();
2838 
2839             if (srcPixStride > 2 || dstPixStride > 2) {
2840                 throw new IllegalArgumentException("source pixel stride " + srcPixStride +
2841                         " with destination pixel stride " + dstPixStride +
2842                         " is not supported");
2843             }
2844 
2845             if (srcRowStride == dstRowStride && srcPixStride == dstPixStride &&
2846                     srcPixStride == 1) {
2847                 // Fast path, just copy the content in the byteBuffer all together.
2848                 dstBuffer.put(srcBuffer);
2849             } else {
2850                 Size effectivePlaneSize = getEffectivePlaneSizeForImage(src, i);
2851                 int srcRowByteCount = srcRowStride;
2852                 int dstRowByteCount = dstRowStride;
2853                 byte[] srcDataRow = new byte[Math.max(srcRowStride, dstRowStride)];
2854 
2855                 if (srcPixStride == dstPixStride && srcPixStride == 1) {
2856                     // Row by row copy case
2857                     for (int row = 0; row < effectivePlaneSize.getHeight(); row++) {
2858                         if (row == effectivePlaneSize.getHeight() - 1) {
2859                             // Special case for interleaved planes: need to handle the last row
2860                             // carefully to avoid memory corruption. Check if we have enough bytes
2861                             // to copy.
2862                             srcRowByteCount = Math.min(srcRowByteCount, srcBuffer.remaining());
2863                             dstRowByteCount = Math.min(dstRowByteCount, dstBuffer.remaining());
2864                         }
2865                         srcBuffer.get(srcDataRow, /*offset*/0, srcRowByteCount);
2866                         dstBuffer.put(srcDataRow, /*offset*/0, dstRowByteCount);
2867                     }
2868                 } else {
2869                     // Row by row per pixel copy case
2870                     byte[] dstDataRow = new byte[dstRowByteCount];
2871                     for (int row = 0; row < effectivePlaneSize.getHeight(); row++) {
2872                         if (row == effectivePlaneSize.getHeight() - 1) {
2873                             // Special case for interleaved planes: need to handle the last row
2874                             // carefully to avoid memory corruption. Check if we have enough bytes
2875                             // to copy.
2876                             int remainingBytes = srcBuffer.remaining();
2877                             if (srcRowByteCount > remainingBytes) {
2878                                 srcRowByteCount = remainingBytes;
2879                             }
2880                             remainingBytes = dstBuffer.remaining();
2881                             if (dstRowByteCount > remainingBytes) {
2882                                 dstRowByteCount = remainingBytes;
2883                             }
2884                         }
2885                         srcBuffer.get(srcDataRow, /*offset*/0, srcRowByteCount);
2886                         int pos = dstBuffer.position();
2887                         dstBuffer.get(dstDataRow, /*offset*/0, dstRowByteCount);
2888                         dstBuffer.position(pos);
2889                         for (int x = 0; x < effectivePlaneSize.getWidth(); x++) {
2890                             dstDataRow[x * dstPixStride] = srcDataRow[x * srcPixStride];
2891                         }
2892                         dstBuffer.put(dstDataRow, /*offset*/0, dstRowByteCount);
2893                     }
2894                 }
2895             }
2896             srcBuffer.position(srcPos);
2897             dstBuffer.rewind();
2898         }
2899     }
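
    /**
     * Usage sketch (illustrative only): forwarding a frame from an {@link ImageReader} into an
     * {@link ImageWriter} input image, e.g. before queueing it for reprocessing. The reader and
     * writer are hypothetical and must share the same (non-PRIVATE) format and size.
     */
    private static void exampleForwardImage(ImageReader reader, ImageWriter writer) {
        Image src = reader.acquireNextImage();
        Image dst = writer.dequeueInputImage();
        try {
            imageCopy(src, dst);
            // queueInputImage takes ownership of dst, so it must not be closed here.
            writer.queueInputImage(dst);
        } finally {
            src.close();
        }
    }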
2900 
2901     private static Size getEffectivePlaneSizeForImage(Image image, int planeIdx) {
2902         switch (image.getFormat()) {
2903             case ImageFormat.YUV_420_888:
2904                 if (planeIdx == 0) {
2905                     return new Size(image.getWidth(), image.getHeight());
2906                 } else {
2907                     return new Size(image.getWidth() / 2, image.getHeight() / 2);
2908                 }
2909             case ImageFormat.JPEG:
2910             case ImageFormat.RAW_SENSOR:
2911             case ImageFormat.RAW10:
2912             case ImageFormat.RAW12:
2913             case ImageFormat.DEPTH16:
2914                 return new Size(image.getWidth(), image.getHeight());
2915             case ImageFormat.PRIVATE:
2916                 return new Size(0, 0);
2917             default:
2918                 throw new UnsupportedOperationException(
2919                         String.format("Invalid image format %d", image.getFormat()));
2920         }
2921     }
2922 
2923     /**
2924      * <p>
2925      * Checks whether the two images are strongly equal.
2926      * </p>
2927      * <p>
2928      * Two images are strongly equal if and only if the data, formats, sizes,
2929      * and timestamps are same. For {@link ImageFormat#PRIVATE PRIVATE} format
2930      * images, the image data is not accessible thus the data comparison is
2931      * effectively skipped as the number of planes is zero.
2932      * </p>
2933      * <p>
2934      * Note that this method compares the pixel data even outside of the crop
2935      * region, which may not be necessary for general use case.
2936      * </p>
2937      *
2938      * @param lhsImg First image to be compared with.
2939      * @param rhsImg Second image to be compared with.
2940      * @return true if the two images are equal, false otherwise.
2941      * @throws IllegalArgumentException If either of image is null.
2942      */
2943     public static boolean isImageStronglyEqual(Image lhsImg, Image rhsImg) {
2944         if (lhsImg == null || rhsImg == null) {
2945             throw new IllegalArgumentException("Images should be non-null");
2946         }
2947 
2948         if (lhsImg.getFormat() != rhsImg.getFormat()) {
2949             Log.i(TAG, "lhsImg format " + lhsImg.getFormat() + " is different from rhsImg format "
2950                     + rhsImg.getFormat());
2951             return false;
2952         }
2953 
2954         if (lhsImg.getWidth() != rhsImg.getWidth()) {
2955             Log.i(TAG, "lhsImg width " + lhsImg.getWidth() + " is different from rhsImg width "
2956                     + rhsImg.getWidth());
2957             return false;
2958         }
2959 
2960         if (lhsImg.getHeight() != rhsImg.getHeight()) {
2961             Log.i(TAG, "lhsImg height " + lhsImg.getHeight() + " is different from rhsImg height "
2962                     + rhsImg.getHeight());
2963             return false;
2964         }
2965 
2966         if (lhsImg.getTimestamp() != rhsImg.getTimestamp()) {
2967             Log.i(TAG, "lhsImg timestamp " + lhsImg.getTimestamp()
2968                     + " is different from rhsImg timestamp " + rhsImg.getTimestamp());
2969             return false;
2970         }
2971 
2972         if (!lhsImg.getCropRect().equals(rhsImg.getCropRect())) {
2973             Log.i(TAG, "lhsImg crop rect " + lhsImg.getCropRect()
2974                     + " is different from rhsImg crop rect " + rhsImg.getCropRect());
2975             return false;
2976         }
2977 
2978         // Compare data inside of the image.
2979         Plane[] lhsPlanes = lhsImg.getPlanes();
2980         Plane[] rhsPlanes = rhsImg.getPlanes();
2981         ByteBuffer lhsBuffer = null;
2982         ByteBuffer rhsBuffer = null;
2983         for (int i = 0; i < lhsPlanes.length; i++) {
2984             lhsBuffer = lhsPlanes[i].getBuffer();
2985             rhsBuffer = rhsPlanes[i].getBuffer();
2986             lhsBuffer.rewind();
2987             rhsBuffer.rewind();
2988             // Special case for YUV420_888 buffer with different layout or
2989             // potentially differently interleaved U/V planes.
2990             if (lhsImg.getFormat() == ImageFormat.YUV_420_888 &&
2991                     (lhsPlanes[i].getPixelStride() != rhsPlanes[i].getPixelStride() ||
2992                      lhsPlanes[i].getRowStride() != rhsPlanes[i].getRowStride() ||
2993                      (lhsPlanes[i].getPixelStride() != 1))) {
2994                 int width = getEffectivePlaneSizeForImage(lhsImg, i).getWidth();
2995                 int height = getEffectivePlaneSizeForImage(lhsImg, i).getHeight();
2996                 int rowSizeL = lhsPlanes[i].getRowStride();
2997                 int rowSizeR = rhsPlanes[i].getRowStride();
2998                 byte[] lhsRow = new byte[rowSizeL];
2999                 byte[] rhsRow = new byte[rowSizeR];
3000                 int pixStrideL = lhsPlanes[i].getPixelStride();
3001                 int pixStrideR = rhsPlanes[i].getPixelStride();
3002                 for (int r = 0; r < height; r++) {
3003                     if (r == height -1) {
3004                         rowSizeL = lhsBuffer.remaining();
3005                         rowSizeR = rhsBuffer.remaining();
3006                     }
3007                     lhsBuffer.get(lhsRow, /*offset*/0, rowSizeL);
3008                     rhsBuffer.get(rhsRow, /*offset*/0, rowSizeR);
3009                     for (int c = 0; c < width; c++) {
3010                         if (lhsRow[c * pixStrideL] != rhsRow[c * pixStrideR]) {
3011                             Log.i(TAG, String.format(
3012                                     "byte buffers for plane %d row %d col %d don't match.",
3013                                     i, r, c));
3014                             return false;
3015                         }
3016                     }
3017                 }
3018             } else {
3019                 // Compare entire buffer directly
3020                 if (!lhsBuffer.equals(rhsBuffer)) {
3021                     Log.i(TAG, "byte buffers for plane " +  i + " don't match.");
3022                     return false;
3023                 }
3024             }
3025         }
3026 
3027         return true;
3028     }
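
    /**
     * Usage sketch (illustrative only): asserting that a reprocessed output frame matches its
     * input frame bit-for-bit. Both images are hypothetical and must be non-PRIVATE so their
     * planes can be compared.
     */
    private static void exampleAssertImagesMatch(Image input, Image output) {
        assertTrue("Reprocessed image differs from the input image",
                isImageStronglyEqual(input, output));
    }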
3029 
3030     /**
3031      * Set jpeg related keys in a capture request builder.
3032      *
3033      * @param builder The capture request builder to set the keys in
3034      * @param exifData The exif data to set.
3035      * @param thumbnailSize The thumbnail size to set.
3036      * @param collector The camera error collector to collect errors.
3037      */
3038     public static void setJpegKeys(CaptureRequest.Builder builder, ExifTestData exifData,
3039             Size thumbnailSize, CameraErrorCollector collector) {
3040         builder.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, thumbnailSize);
3041         builder.set(CaptureRequest.JPEG_GPS_LOCATION, exifData.gpsLocation);
3042         builder.set(CaptureRequest.JPEG_ORIENTATION, exifData.jpegOrientation);
3043         builder.set(CaptureRequest.JPEG_QUALITY, exifData.jpegQuality);
3044         builder.set(CaptureRequest.JPEG_THUMBNAIL_QUALITY,
3045                 exifData.thumbnailQuality);
3046 
3047         // Validate request set and get.
3048         collector.expectEquals("JPEG thumbnail size request set and get should match",
3049                 thumbnailSize, builder.get(CaptureRequest.JPEG_THUMBNAIL_SIZE));
3050         collector.expectTrue("GPS locations request set and get should match.",
3051                 areGpsFieldsEqual(exifData.gpsLocation,
3052                 builder.get(CaptureRequest.JPEG_GPS_LOCATION)));
3053         collector.expectEquals("JPEG orientation request set and get should match",
3054                 exifData.jpegOrientation,
3055                 builder.get(CaptureRequest.JPEG_ORIENTATION));
3056         collector.expectEquals("JPEG quality request set and get should match",
3057                 exifData.jpegQuality, builder.get(CaptureRequest.JPEG_QUALITY));
3058         collector.expectEquals("JPEG thumbnail quality request set and get should match",
3059                 exifData.thumbnailQuality,
3060                 builder.get(CaptureRequest.JPEG_THUMBNAIL_QUALITY));
3061     }
3062 
3063     /**
3064      * Simple validation of JPEG image size and format.
3065      * <p>
3066      * Only validates the image object's basic correctness. It is fast, but doesn't actually
3067      * check the buffer data. Assert is used here as it makes no sense to
3068      * continue the test if the captured jpeg image has some serious failures.
3069      * </p>
3070      *
3071      * @param image The captured JPEG/HEIC image
3072      * @param expectedSize Expected capture JPEG/HEIC size
3073      * @param format JPEG/HEIC image format
3074      */
3075     public static void basicValidateBlobImage(Image image, Size expectedSize, int format) {
3076         Size imageSz = new Size(image.getWidth(), image.getHeight());
3077         assertTrue(
3078                 String.format("Image size doesn't match (expected %s, actual %s) ",
3079                         expectedSize.toString(), imageSz.toString()), expectedSize.equals(imageSz));
3080         assertEquals("Image format should be " + ((format == ImageFormat.HEIC) ? "HEIC" : "JPEG"),
3081                 format, image.getFormat());
3082         assertNotNull("Image plane shouldn't be null", image.getPlanes());
3083         assertEquals("Image plane number should be 1", 1, image.getPlanes().length);
3084 
3085         // Jpeg/Heic decoding validate was done in ImageReaderTest,
3086         // no need to duplicate the test here.
3087     }
3088 
3089     /**
3090      * Verify that the EXIF and JPEG related keys in a capture result are as expected:
3091      * - Capture request get values are the same as those that were set.
3092      * - The capture result's EXIF data is the same as was set by
3093      *   the capture request.
3094      * - New tags in the result set by the camera service are
3095      *   present and semantically correct.
3096      *
3097      * @param image The output JPEG/HEIC image to verify.
3098      * @param captureResult The capture result to verify.
3099      * @param expectedSize The expected JPEG/HEIC size.
3100      * @param expectedThumbnailSize The expected thumbnail size.
3101      * @param expectedExifData The expected EXIF data
3102      * @param staticInfo The static metadata for the camera device.
3103      * @param allStaticInfo The camera Id to static metadata map for all cameras.
3104      * @param blobFilename The filename to dump the jpeg/heic to.
3105      * @param collector The camera error collector to collect errors.
3106      * @param format JPEG/HEIC format
3107      */
3108     public static void verifyJpegKeys(Image image, CaptureResult captureResult, Size expectedSize,
3109             Size expectedThumbnailSize, ExifTestData expectedExifData, StaticMetadata staticInfo,
3110             HashMap<String, StaticMetadata> allStaticInfo, CameraErrorCollector collector,
3111             String debugFileNameBase, int format) throws Exception {
3112 
3113         basicValidateBlobImage(image, expectedSize, format);
3114 
3115         byte[] blobBuffer = getDataFromImage(image);
3116         // Have to dump into a file to be able to use ExifInterface
3117         String filePostfix = (format == ImageFormat.HEIC ? ".heic" : ".jpeg");
3118         String blobFilename = debugFileNameBase + "/verifyJpegKeys" + filePostfix;
3119         dumpFile(blobFilename, blobBuffer);
3120         ExifInterface exif = new ExifInterface(blobFilename);
3121 
3122         if (expectedThumbnailSize.equals(new Size(0,0))) {
3123             collector.expectTrue("Jpeg shouldn't have thumbnail when thumbnail size is (0, 0)",
3124                     !exif.hasThumbnail());
3125         } else {
3126             collector.expectTrue("Jpeg must have thumbnail for thumbnail size " +
3127                     expectedThumbnailSize, exif.hasThumbnail());
3128         }
3129 
3130         // Validate capture result vs. request
3131         Size resultThumbnailSize = captureResult.get(CaptureResult.JPEG_THUMBNAIL_SIZE);
3132         int orientationTested = expectedExifData.jpegOrientation;
3133         // The legacy shim never rotates the thumbnail size
3134         if ((orientationTested == 90 || orientationTested == 270) &&
3135                 staticInfo.isHardwareLevelAtLeastLimited()) {
3136             int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
3137                     /*defaultValue*/-1);
3138             if (exifOrientation == ExifInterface.ORIENTATION_UNDEFINED) {
3139                 // Device physically rotated image+thumbnail data
3140                 // Expect thumbnail size to be also rotated
3141                 resultThumbnailSize = new Size(resultThumbnailSize.getHeight(),
3142                         resultThumbnailSize.getWidth());
3143             }
3144         }
3145 
3146         collector.expectEquals("JPEG thumbnail size result and request should match",
3147                 expectedThumbnailSize, resultThumbnailSize);
3148         if (collector.expectKeyValueNotNull(captureResult, CaptureResult.JPEG_GPS_LOCATION) !=
3149                 null) {
3150             collector.expectTrue("GPS location result and request should match.",
3151                     areGpsFieldsEqual(expectedExifData.gpsLocation,
3152                     captureResult.get(CaptureResult.JPEG_GPS_LOCATION)));
3153         }
3154         collector.expectEquals("JPEG orientation result and request should match",
3155                 expectedExifData.jpegOrientation,
3156                 captureResult.get(CaptureResult.JPEG_ORIENTATION));
3157         collector.expectEquals("JPEG quality result and request should match",
3158                 expectedExifData.jpegQuality, captureResult.get(CaptureResult.JPEG_QUALITY));
3159         collector.expectEquals("JPEG thumbnail quality result and request should match",
3160                 expectedExifData.thumbnailQuality,
3161                 captureResult.get(CaptureResult.JPEG_THUMBNAIL_QUALITY));
3162 
3163         // Validate other exif tags for all non-legacy devices
3164         if (!staticInfo.isHardwareLevelLegacy()) {
3165             verifyJpegExifExtraTags(exif, expectedSize, captureResult, staticInfo, allStaticInfo,
3166                     collector, expectedExifData);
3167         }
3168     }
3169 
3170     /**
3171      * Get the degree of an EXIF orientation.
3172      */
3173     private static int getExifOrientationInDegree(int exifOrientation,
3174             CameraErrorCollector collector) {
3175         switch (exifOrientation) {
3176             case ExifInterface.ORIENTATION_NORMAL:
3177                 return 0;
3178             case ExifInterface.ORIENTATION_ROTATE_90:
3179                 return 90;
3180             case ExifInterface.ORIENTATION_ROTATE_180:
3181                 return 180;
3182             case ExifInterface.ORIENTATION_ROTATE_270:
3183                 return 270;
3184             default:
3185                 collector.addMessage("It is impossible to get non 0, 90, 180, 270 degrees exif" +
3186                         " info based on the request orientation range");
3187                 return 0;
3188         }
3189     }
3190 
3191     /**
3192      * Get all of the supported focal lengths for capture result.
3193      *
3194      * If the camera is a logical camera, return the focal lengths of the logical camera
3195      * and its active physical camera.
3196      *
3197      * If the camera isn't a logical camera, return the focal lengths supported by the
3198      * single camera.
3199      */
3200     public static Set<Float> getAvailableFocalLengthsForResult(CaptureResult result,
3201             StaticMetadata staticInfo,
3202             HashMap<String, StaticMetadata> allStaticInfo) {
3203         Set<Float> focalLengths = new HashSet<Float>();
3204         float[] supportedFocalLengths = staticInfo.getAvailableFocalLengthsChecked();
3205         for (float focalLength : supportedFocalLengths) {
3206             focalLengths.add(focalLength);
3207         }
3208 
3209         if (staticInfo.isLogicalMultiCamera()) {
3210             boolean activePhysicalCameraIdSupported =
3211                     staticInfo.isActivePhysicalCameraIdSupported();
3212             Set<String> physicalCameraIds;
3213             if (activePhysicalCameraIdSupported) {
3214                 String activePhysicalCameraId = result.get(
3215                         CaptureResult.LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID);
3216                 physicalCameraIds = new HashSet<String>();
3217                 physicalCameraIds.add(activePhysicalCameraId);
3218             } else {
3219                 physicalCameraIds = staticInfo.getCharacteristics().getPhysicalCameraIds();
3220             }
3221 
3222             for (String physicalCameraId : physicalCameraIds) {
3223                 StaticMetadata physicalStaticInfo = allStaticInfo.get(physicalCameraId);
3224                 if (physicalStaticInfo != null) {
3225                     float[] focalLengthsArray =
3226                             physicalStaticInfo.getAvailableFocalLengthsChecked();
3227                     for (float focalLength: focalLengthsArray) {
3228                         focalLengths.add(focalLength);
3229                     }
3230                 }
3231             }
3232         }
3233 
3234         return focalLengths;
3235     }
3236 
3237     /**
3238      * Validate and return the focal length.
3239      *
3240      * @param result Capture result to get the focal length
3241      * @param supportedFocalLengths Valid focal lengths to check the result focal length against
3242      * @param collector The camera error collector
3243      * @return Focal length from capture result or -1 if focal length is not available.
3244      */
3245     private static float validateFocalLength(CaptureResult result,
3246             Set<Float> supportedFocalLengths, CameraErrorCollector collector) {
3247         Float resultFocalLength = result.get(CaptureResult.LENS_FOCAL_LENGTH);
3248         if (collector.expectTrue("Focal length is invalid",
3249                 resultFocalLength != null && resultFocalLength > 0)) {
3250             collector.expectTrue("Focal length should be one of the available focal length",
3251                     supportedFocalLengths.contains(resultFocalLength));
3252             return resultFocalLength;
3253         }
3254         return -1;
3255     }
3256 
3257     /**
3258      * Get all of the supported apertures for capture result.
3259      *
3260      * If the camera is a logical camera, return the apertures of the logical camera
3261      * and its active physical camera.
3262      *
3263      * If the camera isn't a logical camera, return the apertures supported by the
3264      * single camera.
3265      */
3266     private static Set<Float> getAvailableAperturesForResult(CaptureResult result,
3267             StaticMetadata staticInfo, HashMap<String, StaticMetadata> allStaticInfo) {
3268         Set<Float> allApertures = new HashSet<Float>();
3269         float[] supportedApertures = staticInfo.getAvailableAperturesChecked();
3270         for (float aperture : supportedApertures) {
3271             allApertures.add(aperture);
3272         }
3273 
3274         if (staticInfo.isLogicalMultiCamera()) {
3275             boolean activePhysicalCameraIdSupported =
3276                     staticInfo.isActivePhysicalCameraIdSupported();
3277             Set<String> physicalCameraIds;
3278             if (activePhysicalCameraIdSupported) {
3279                 String activePhysicalCameraId = result.get(
3280                         CaptureResult.LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID);
3281                 physicalCameraIds = new HashSet<String>();
3282                 physicalCameraIds.add(activePhysicalCameraId);
3283             } else {
3284                 physicalCameraIds = staticInfo.getCharacteristics().getPhysicalCameraIds();
3285             }
3286 
3287             for (String physicalCameraId : physicalCameraIds) {
3288                 StaticMetadata physicalStaticInfo = allStaticInfo.get(physicalCameraId);
3289                 if (physicalStaticInfo != null) {
3290                     float[] apertures = physicalStaticInfo.getAvailableAperturesChecked();
3291                     for (float aperture: apertures) {
3292                         allApertures.add(aperture);
3293                     }
3294                 }
3295             }
3296         }
3297 
3298         return allApertures;
3299     }
3300 
3301     /**
3302      * Validate and return the aperture.
3303      *
3304      * @param result Capture result to get the aperture
3305      * @return Aperture from capture result or -1 if aperture is not available.
3306      */
3307     private static float validateAperture(CaptureResult result,
3308             Set<Float> supportedApertures, CameraErrorCollector collector) {
3309         Float resultAperture = result.get(CaptureResult.LENS_APERTURE);
3310         if (collector.expectTrue("Capture result aperture is invalid",
3311                 resultAperture != null && resultAperture > 0)) {
3312             collector.expectTrue("Aperture should be one of the available apertures",
3313                     supportedApertures.contains(resultAperture));
3314             return resultAperture;
3315         }
3316         return -1;
3317     }
3318 
3319     /**
3320      * Return the closest value in a Set of floats.
3321      */
3322     private static float getClosestValueInSet(Set<Float> values, float target) {
3323         float minDistance = Float.MAX_VALUE;
3324         float closestValue = -1.0f;
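             // If the set is empty, the loop never runs and -1.0f is returned.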
3325         for (float value : values) {
3326             float distance = Math.abs(value - target);
3327             if (minDistance > distance) {
3328                 minDistance = distance;
3329                 closestValue = value;
3330             }
3331         }
3332 
3333         return closestValue;
3334     }
3335 
3336     /**
3337      * Return whether two Locations' GPS fields are the same.
3338      */
3339     private static boolean areGpsFieldsEqual(Location a, Location b) {
3340         if (a == null || b == null) {
3341             return false;
3342         }
3343 
3344         return a.getTime() == b.getTime() && a.getLatitude() == b.getLatitude() &&
3345                 a.getLongitude() == b.getLongitude() && a.getAltitude() == b.getAltitude() &&
3346                 java.util.Objects.equals(a.getProvider(), b.getProvider());
3347     }
3348 
3349     /**
3350      * Verify extra tags in JPEG EXIF
3351      */
3352     private static void verifyJpegExifExtraTags(ExifInterface exif, Size jpegSize,
3353             CaptureResult result, StaticMetadata staticInfo,
3354             HashMap<String, StaticMetadata> allStaticInfo,
3355             CameraErrorCollector collector, ExifTestData expectedExifData)
3356             throws ParseException {
3357         /**
3358          * TAG_IMAGE_WIDTH and TAG_IMAGE_LENGTH and TAG_ORIENTATION.
3359          * Orientation and exif width/height need to be tested carefully; there are two cases:
3360          *
3361          * 1. The device rotates the image buffer physically, so the exif width/height may not
3362          * match the requested still capture size and we need to swap them before checking.
3363          *
3364          * 2. The device uses the exif tag to record the image orientation and doesn't rotate
3365          * the jpeg image buffer itself. In this case, the exif width/height should always match
3366          * the requested still capture size, and the exif orientation should always match the
3367          * requested orientation.
3368          *
3369          */
3370         int exifWidth = exif.getAttributeInt(ExifInterface.TAG_IMAGE_WIDTH, /*defaultValue*/0);
3371         int exifHeight = exif.getAttributeInt(ExifInterface.TAG_IMAGE_LENGTH, /*defaultValue*/0);
3372         Size exifSize = new Size(exifWidth, exifHeight);
3373         // Orientation could be missing; default to -1 so a missing tag is caught by the range check below.
3374         int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
3375                 /*defaultValue*/-1);
3376         // Get the requested orientation from the result, since it should match the request.
3377         if (collector.expectKeyValueNotNull(result, CaptureResult.JPEG_ORIENTATION) != null) {
3378             int requestedOrientation = result.get(CaptureResult.JPEG_ORIENTATION);
3379             final int ORIENTATION_MIN = ExifInterface.ORIENTATION_UNDEFINED;
3380             final int ORIENTATION_MAX = ExifInterface.ORIENTATION_ROTATE_270;
3381             boolean orientationValid = collector.expectTrue(String.format(
3382                     "Exif orientation must be in range of [%d, %d]",
3383                     ORIENTATION_MIN, ORIENTATION_MAX),
3384                     exifOrientation >= ORIENTATION_MIN && exifOrientation <= ORIENTATION_MAX);
3385             if (orientationValid) {
3386                 /**
3387                  * The captured image doesn't carry the requested orientation,
3388                  * which means the device rotated the image buffer physically. We
3389                  * should then swap the exif width/height before comparing.
3390                  */
3391                 boolean deviceRotatedImage = exifOrientation == ExifInterface.ORIENTATION_UNDEFINED;
3392 
3393                 if (deviceRotatedImage) {
3394                     // Case 1.
3395                     boolean needSwap = (requestedOrientation % 180 == 90);
3396                     if (needSwap) {
3397                         exifSize = new Size(exifHeight, exifWidth);
3398                     }
3399                 } else {
3400                     // Case 2.
3401                     collector.expectEquals("Exif orientation should match requested orientation",
3402                             requestedOrientation, getExifOrientationInDegree(exifOrientation,
3403                             collector));
3404                 }
3405             }
3406         }
3407 
3408         /**
3409          * Ideally we would check exifSize == jpegSize == actual buffer size. But
3410          * jpegSize == jpeg decode bounds size (from the jpeg frame
3411          * header, not exif) was already validated in ImageReaderTest, so there is no
3412          * need to validate it again here.
3413          */
3414         collector.expectEquals("Exif size should match jpeg capture size", jpegSize, exifSize);
3415 
3416         // TAG_DATETIME, it should be local time
3417         long currentTimeInMs = System.currentTimeMillis();
3418         long currentTimeInSecond = currentTimeInMs / 1000;
3419         Date date = new Date(currentTimeInMs);
3420         String localDatetime = new SimpleDateFormat("yyyy:MM:dd HH:").format(date);
3421         String dateTime = exif.getAttribute(ExifInterface.TAG_DATETIME);
3422         if (collector.expectTrue("Exif TAG_DATETIME shouldn't be null", dateTime != null)) {
3423             collector.expectTrue("Exif TAG_DATETIME is wrong",
3424                     dateTime.length() == EXIF_DATETIME_LENGTH);
3425             long exifTimeInSecond =
3426                     new SimpleDateFormat("yyyy:MM:dd HH:mm:ss").parse(dateTime).getTime() / 1000;
3427             long delta = currentTimeInSecond - exifTimeInSecond;
3428             collector.expectTrue("Capture time deviates too much from the current time",
3429                     Math.abs(delta) < EXIF_DATETIME_ERROR_MARGIN_SEC);
3430             // It should be local time.
3431             collector.expectTrue("Exif date time should be local time",
3432                     dateTime.startsWith(localDatetime));
3433         }
3434 
3435         boolean isExternalCamera = staticInfo.isExternalCamera();
3436         if (!isExternalCamera) {
3437             // TAG_FOCAL_LENGTH.
3438             Set<Float> focalLengths = getAvailableFocalLengthsForResult(
3439                     result, staticInfo, allStaticInfo);
3440             float exifFocalLength = (float)exif.getAttributeDouble(
3441                         ExifInterface.TAG_FOCAL_LENGTH, -1);
3442             collector.expectEquals("Focal length should match",
3443                     getClosestValueInSet(focalLengths, exifFocalLength),
3444                     exifFocalLength, EXIF_FOCAL_LENGTH_ERROR_MARGIN);
3445             // More checks for focal length.
3446             collector.expectEquals("Exif focal length should match capture result",
3447                     validateFocalLength(result, focalLengths, collector),
3448                     exifFocalLength, EXIF_FOCAL_LENGTH_ERROR_MARGIN);
3449 
3450             // TAG_EXPOSURE_TIME
3451             // ExifInterface API gives exposure time value in the form of float instead of rational
3452             String exposureTime = exif.getAttribute(ExifInterface.TAG_EXPOSURE_TIME);
3453             collector.expectNotNull("Exif TAG_EXPOSURE_TIME shouldn't be null", exposureTime);
3454             if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_EXPOSURE_TIME)) {
3455                 if (exposureTime != null) {
3456                     double exposureTimeValue = Double.parseDouble(exposureTime);
3457                     long expTimeResult = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
3458                     double expected = expTimeResult / 1e9;
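                         // The tolerance is the larger of a relative margin and an absolute floor,
                         // so very short exposures don't get an unrealistically tight bound.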
3459                     double tolerance = expected * EXIF_EXPOSURE_TIME_ERROR_MARGIN_RATIO;
3460                     tolerance = Math.max(tolerance, EXIF_EXPOSURE_TIME_MIN_ERROR_MARGIN_SEC);
3461                     collector.expectEquals("Exif exposure time doesn't match", expected,
3462                             exposureTimeValue, tolerance);
3463                 }
3464             }
3465 
3466             // TAG_APERTURE
3467             // ExifInterface API gives aperture value in the form of float instead of rational
3468             String exifAperture = exif.getAttribute(ExifInterface.TAG_APERTURE);
3469             collector.expectNotNull("Exif TAG_APERTURE shouldn't be null", exifAperture);
3470             if (staticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_AVAILABLE_APERTURES)) {
3471                 Set<Float> apertures = getAvailableAperturesForResult(
3472                         result, staticInfo, allStaticInfo);
3473                 if (exifAperture != null) {
3474                     float apertureValue = Float.parseFloat(exifAperture);
3475                     collector.expectEquals("Aperture value should match",
3476                             getClosestValueInSet(apertures, apertureValue),
3477                             apertureValue, EXIF_APERTURE_ERROR_MARGIN);
3478                     // More checks for aperture.
3479                     collector.expectEquals("Exif aperture should match capture result",
3480                             validateAperture(result, apertures, collector),
3481                             apertureValue, EXIF_APERTURE_ERROR_MARGIN);
3482                 }
3483             }
3484 
3485             // TAG_MAKE
3486             String make = exif.getAttribute(ExifInterface.TAG_MAKE);
3487             collector.expectEquals("Exif TAG_MAKE is incorrect", Build.MANUFACTURER, make);
3488 
3489             // TAG_MODEL
3490             String model = exif.getAttribute(ExifInterface.TAG_MODEL);
3491             collector.expectEquals("Exif TAG_MODEL is incorrect", Build.MODEL, model);
3492 
3493 
3494             // TAG_ISO
3495             int iso = exif.getAttributeInt(ExifInterface.TAG_ISO, /*defaultValue*/-1);
3496             if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_SENSITIVITY) ||
3497                     staticInfo.areKeysAvailable(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
3498                 int expectedIso = 100;
3499                 if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_SENSITIVITY)) {
3500                     expectedIso = result.get(CaptureResult.SENSOR_SENSITIVITY);
3501                 }
3502                 if (staticInfo.areKeysAvailable(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
3503                     expectedIso = expectedIso *
3504                             result.get(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST);
3505                 } else {
3506                     expectedIso *= 100;
3507                 }
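                     // expectedIso is now in units of ISO x 100 (sensitivity times the post-RAW
                     // boost, or times 100 when no boost is reported), so scale it back down and
                     // allow MAX_ISO_MISMATCH of slack for rounding.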
3508                 collector.expectInRange("Exif TAG_ISO is incorrect", iso,
3509                         expectedIso/100,((expectedIso + 50)/100) + MAX_ISO_MISMATCH);
3510             }
3511         } else {
3512             // External camera specific checks
3513             // TAG_MAKE
3514             String make = exif.getAttribute(ExifInterface.TAG_MAKE);
3515             collector.expectNotNull("Exif TAG_MAKE is null", make);
3516 
3517             // TAG_MODEL
3518             String model = exif.getAttribute(ExifInterface.TAG_MODEL);
3519             collector.expectNotNull("Exif TAG_MODEL is null", model);
3520         }
3521 
3522 
3523         /**
3524          * TAG_FLASH. TODO: For full devices, can check a lot more info
3525          * (http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/EXIF.html#Flash)
3526          */
3527         String flash = exif.getAttribute(ExifInterface.TAG_FLASH);
3528         collector.expectNotNull("Exif TAG_FLASH shouldn't be null", flash);
3529 
3530         /**
3531          * TAG_WHITE_BALANCE. TODO: For full devices, with the DNG tags, we
3532          * should be able to cross-check android.sensor.referenceIlluminant.
3533          */
3534         String whiteBalance = exif.getAttribute(ExifInterface.TAG_WHITE_BALANCE);
3535         collector.expectNotNull("Exif TAG_WHITE_BALANCE shouldn't be null", whiteBalance);
3536 
3537         // TAG_DATETIME_DIGITIZED (a.k.a Create time for digital cameras).
3538         String digitizedTime = exif.getAttribute(ExifInterface.TAG_DATETIME_DIGITIZED);
3539         collector.expectNotNull("Exif TAG_DATETIME_DIGITIZED shouldn't be null", digitizedTime);
3540         if (digitizedTime != null) {
3541             String expectedDateTime = exif.getAttribute(ExifInterface.TAG_DATETIME);
3542             collector.expectNotNull("Exif TAG_DATETIME shouldn't be null", expectedDateTime);
3543             if (expectedDateTime != null) {
3544                 collector.expectEquals("dateTime should match digitizedTime",
3545                         expectedDateTime, digitizedTime);
3546             }
3547         }
3548 
3549         /**
3550          * TAG_SUBSEC_TIME. Since the sub second tag strings are truncated to at
3551          * most 9 digits in ExifInterface implementation, use getAttributeInt to
3552          * sanitize it. When the default value -1 is returned, it means that
3553          * this exif tag either doesn't exist or is a non-numerical invalid
3554          * string. Same rule applies to the rest of sub second tags.
3555          */
3556         int subSecTime = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME, /*defaultValue*/-1);
3557         collector.expectTrue("Exif TAG_SUBSEC_TIME value is null or invalid!", subSecTime >= 0);
3558 
3559         // TAG_SUBSEC_TIME_ORIG
3560         int subSecTimeOrig = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME_ORIG,
3561                 /*defaultValue*/-1);
3562         collector.expectTrue("Exif TAG_SUBSEC_TIME_ORIG value is null or invalid!",
3563                 subSecTimeOrig >= 0);
3564 
3565         // TAG_SUBSEC_TIME_DIG
3566         int subSecTimeDig = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME_DIG,
3567                 /*defaultValue*/-1);
3568         collector.expectTrue(
3569                 "Exif TAG_SUBSEC_TIME_DIG value is null or invalid!", subSecTimeDig >= 0);
3570 
3571         /**
3572          * TAG_GPS_DATESTAMP & TAG_GPS_TIMESTAMP.
3573          * The GPS timestamp information should be in seconds UTC time.
3574          */
3575         String gpsDatestamp = exif.getAttribute(ExifInterface.TAG_GPS_DATESTAMP);
3576         collector.expectNotNull("Exif TAG_GPS_DATESTAMP shouldn't be null", gpsDatestamp);
3577         String gpsTimestamp = exif.getAttribute(ExifInterface.TAG_GPS_TIMESTAMP);
3578         collector.expectNotNull("Exif TAG_GPS_TIMESTAMP shouldn't be null", gpsTimestamp);
3579 
3580         SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy:MM:dd hh:mm:ss z");
3581         String gpsExifTimeString = gpsDatestamp + " " + gpsTimestamp + " UTC";
3582         Date gpsDateTime = dateFormat.parse(gpsExifTimeString);
3583         Date expected = new Date(expectedExifData.gpsLocation.getTime());
3584         collector.expectEquals("Jpeg EXIF GPS time should match", expected, gpsDateTime);
3585     }
3586 
3587 
3588     /**
3589      * Immutable class wrapping the exif test data.
3590      */
3591     public static class ExifTestData {
3592         public final Location gpsLocation;
3593         public final int jpegOrientation;
3594         public final byte jpegQuality;
3595         public final byte thumbnailQuality;
3596 
3597         public ExifTestData(Location location, int orientation,
3598                 byte jpgQuality, byte thumbQuality) {
3599             gpsLocation = location;
3600             jpegOrientation = orientation;
3601             jpegQuality = jpgQuality;
3602             thumbnailQuality = thumbQuality;
3603         }
3604     }
3605 
3606     public static Size getPreviewSizeBound(WindowManager windowManager, Size bound) {
3607         WindowMetrics windowMetrics = windowManager.getCurrentWindowMetrics();
3608         Rect windowBounds = windowMetrics.getBounds();
3609 
3610         int windowHeight = windowBounds.height();
3611         int windowWidth = windowBounds.width();
3612 
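             // Compare in landscape orientation: make windowWidth the longer dimension.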
3613         if (windowHeight > windowWidth) {
3614             windowHeight = windowWidth;
3615             windowWidth = windowBounds.height();
3616         }
3617 
3618         if (bound.getWidth() <= windowWidth
3619                 && bound.getHeight() <= windowHeight) {
3620             return bound;
3621         } else {
3622             return new Size(windowWidth, windowHeight);
3623         }
3624     }
3625 
3626     /**
3627      * Check if a particular stream configuration is supported by configuring it
3628      * to the device.
3629      */
3630     public static boolean isStreamConfigurationSupported(CameraDevice camera,
3631             List<Surface> outputSurfaces,
3632             CameraCaptureSession.StateCallback listener, Handler handler) {
3633         try {
3634             configureCameraSession(camera, outputSurfaces, listener, handler);
3635             return true;
3636         } catch (Exception e) {
3637             Log.i(TAG, "This stream configuration is not supported due to " + e.getMessage());
3638             return false;
3639         }
3640     }
3641 
3642     public final static class SessionConfigSupport {
3643         public final boolean error;
3644         public final boolean callSupported;
3645         public final boolean configSupported;
3646 
3647         public SessionConfigSupport(boolean error,
3648                 boolean callSupported, boolean configSupported) {
3649             this.error = error;
3650             this.callSupported = callSupported;
3651             this.configSupported = configSupported;
3652         }
3653     }
3654 
3655     /**
3656      * Check that a particular stream combination is supported, failing the test if it is not.
3657      */
3658     public static void checkSessionConfigurationWithSurfaces(CameraDevice camera,
3659             Handler handler, List<Surface> outputSurfaces, InputConfiguration inputConfig,
3660             int operatingMode, boolean defaultSupport, String msg) {
3661         List<OutputConfiguration> outConfigurations = new ArrayList<>(outputSurfaces.size());
3662         for (Surface surface : outputSurfaces) {
3663             outConfigurations.add(new OutputConfiguration(surface));
3664         }
3665 
3666         checkSessionConfigurationSupported(camera, handler, outConfigurations,
3667                 inputConfig, operatingMode, defaultSupport, msg);
3668     }
3669 
3670     public static void checkSessionConfigurationSupported(CameraDevice camera,
3671             Handler handler, List<OutputConfiguration> outputConfigs,
3672             InputConfiguration inputConfig, int operatingMode, boolean defaultSupport,
3673             String msg) {
3674         SessionConfigSupport sessionConfigSupported =
3675                 isSessionConfigSupported(camera, handler, outputConfigs, inputConfig,
3676                 operatingMode, defaultSupport);
3677 
3678         assertTrue(msg, !sessionConfigSupported.error && sessionConfigSupported.configSupported);
3679     }
3680 
3681     /**
3682      * Query whether a particular stream combination is supported.
3683      */
3684     public static SessionConfigSupport isSessionConfigSupported(CameraDevice camera,
3685             Handler handler, List<OutputConfiguration> outputConfigs,
3686             InputConfiguration inputConfig, int operatingMode, boolean defaultSupport) {
3687         boolean ret;
3688         BlockingSessionCallback sessionListener = new BlockingSessionCallback();
3689 
3690         SessionConfiguration sessionConfig = new SessionConfiguration(operatingMode, outputConfigs,
3691                 new HandlerExecutor(handler), sessionListener);
3692         if (inputConfig != null) {
3693             sessionConfig.setInputConfiguration(inputConfig);
3694         }
3695 
3696         try {
3697             ret = camera.isSessionConfigurationSupported(sessionConfig);
3698         } catch (UnsupportedOperationException e) {
3699             // Camera doesn't support session configuration query
3700             return new SessionConfigSupport(false/*error*/,
3701                     false/*callSupported*/, defaultSupport/*configSupported*/);
3702         } catch (IllegalArgumentException e) {
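                 // The session configuration itself was rejected as invalid; report it as an error.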
3703             return new SessionConfigSupport(true/*error*/,
3704                     false/*callSupported*/, false/*configSupported*/);
3705         } catch (android.hardware.camera2.CameraAccessException e) {
3706             return new SessionConfigSupport(true/*error*/,
3707                     false/*callSupported*/, false/*configSupported*/);
3708         }
3709 
3710         return new SessionConfigSupport(false/*error*/,
3711                 true/*callSupported*/, ret/*configSupported*/);
3712     }
3713 
3714     /**
3715      * Wait for numResultsWait frames
3716      *
3717      * @param resultListener The capture listener to get capture result back.
3718      * @param numResultsWait Number of frames to wait for
3719      * @param timeout Wait timeout in ms.
3720      *
3721      * @return the last result, or {@code null} if there was none
3722      */
3723     public static CaptureResult waitForNumResults(SimpleCaptureCallback resultListener,
3724             int numResultsWait, int timeout) {
3725         if (numResultsWait < 0 || resultListener == null) {
3726             throw new IllegalArgumentException(
3727                     "Input must be positive number and listener must be non-null");
3728         }
3729 
3730         CaptureResult result = null;
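             // Drain numResultsWait results in order, keeping only the most recent one.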
3731         for (int i = 0; i < numResultsWait; i++) {
3732             result = resultListener.getCaptureResult(timeout);
3733         }
3734 
3735         return result;
3736     }
3737 
3738     /**
3739      * Wait for any expected result key values available in a certain number of results.
3740      *
3741      * <p>
3742      * Check the result immediately if numResultsWait is 0.
3743      * </p>
3744      *
3745      * @param listener The capture listener to get capture result.
3746      * @param resultKey The capture result key associated with the result value.
3747      * @param expectedValues The list of result values to wait for; the method
3748      * returns as soon as any of them is seen.
3749      * @param numResultsWait Number of frames to wait before timing out.
3750      * @param timeout Result wait timeout in ms.
3751      * @throws TimeoutRuntimeException If more than numResultsWait results are
3752      * seen before a matching result arrives, or an individual wait for a
3753      * result times out after 'timeout' ms.
3754      */
3755     public static <T> void waitForAnyResultValue(SimpleCaptureCallback listener,
3756             CaptureResult.Key<T> resultKey, List<T> expectedValues, int numResultsWait,
3757             int timeout) {
3758         if (numResultsWait < 0 || listener == null || expectedValues == null) {
3759             throw new IllegalArgumentException(
3760                     "Input must be non-negative number and listener/expectedValues "
3761                     + "must be non-null");
3762         }
3763 
3764         int i = 0;
3765         CaptureResult result;
3766         do {
3767             result = listener.getCaptureResult(timeout);
3768             T value = result.get(resultKey);
3769             for (T expectedValue : expectedValues) {
3770                 if (VERBOSE) {
3771                     Log.v(TAG, "Current result value for key " + resultKey.getName() + " is: "
3772                             + value);
3773                 }
3774                 if (expectedValue.equals(value)) {
3775                     return;
3776                 }
3777             }
3778         } while (i++ < numResultsWait);
3779 
3780         throw new TimeoutRuntimeException(
3781                 "Unable to get the expected result value " + expectedValues + " for key " +
3782                         resultKey.getName() + " after waiting for " + numResultsWait + " results");
3783     }
3784 
3785     /**
3786      * Wait for the expected result key value to appear within a certain number of results.
3787      *
3788      * <p>
3789      * Check the result immediately if numResultsWait is 0.
3790      * </p>
3791      *
3792      * @param listener The capture listener to get capture result
3793      * @param resultKey The capture result key associated with the result value
3794      * @param expectedValue The result value to wait for
3795      * @param numResultsWait Number of frames to wait before timing out
3796      * @param timeout Wait timeout in ms.
3797      * @throws TimeoutRuntimeException If more than numResultsWait results are
3798      * seen before a matching result arrives, or an individual wait for a
3799      * result times out after 'timeout' ms.
3800      */
3801     public static <T> void waitForResultValue(SimpleCaptureCallback listener,
3802             CaptureResult.Key<T> resultKey, T expectedValue, int numResultsWait, int timeout) {
3803         List<T> expectedValues = new ArrayList<T>();
3804         expectedValues.add(expectedValue);
3805         waitForAnyResultValue(listener, resultKey, expectedValues, numResultsWait, timeout);
3806     }
3807 
3808     /**
3809      * Wait for AE to be stabilized before capture: CONVERGED or FLASH_REQUIRED.
3810      *
3811      * <p>Waits for {@code android.sync.maxLatency} number of results first, to make sure
3812      * that the result is synchronized (or {@code numResultWaitForUnknownLatency} if the latency
3813      * is unknown.</p>
3814      *
3815      * <p>This is a no-op for {@code LEGACY} devices since they don't report
3816      * the {@code aeState} result.</p>
3817      *
3818      * @param resultListener The capture listener to get capture result back.
3819      * @param numResultWaitForUnknownLatency Number of frame to wait if camera device latency is
3820      *                                       unknown.
3821      * @param staticInfo corresponding camera device static metadata.
3822      * @param settingsTimeout wait timeout for settings application in ms.
3823      * @param numResultWait Number of frames to wait for the expected AE states before
3824      * timing out.
3825      */
3826     public static void waitForAeStable(SimpleCaptureCallback resultListener,
3827             int numResultWaitForUnknownLatency, StaticMetadata staticInfo,
3828             int settingsTimeout, int numResultWait) {
3829         waitForSettingsApplied(resultListener, numResultWaitForUnknownLatency, staticInfo,
3830                 settingsTimeout);
3831 
3832         if (!staticInfo.isHardwareLevelAtLeastLimited()) {
3833             // No-op for LEGACY devices, which don't report aeState.
3834             return;
3835         }
3836         List<Integer> expectedAeStates = new ArrayList<Integer>();
3837         expectedAeStates.add(Integer.valueOf(CaptureResult.CONTROL_AE_STATE_CONVERGED));
3838         expectedAeStates.add(Integer.valueOf(CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED));
3839         waitForAnyResultValue(resultListener, CaptureResult.CONTROL_AE_STATE, expectedAeStates,
3840                 numResultWait, settingsTimeout);
3841     }
3842 
3843     /**
3844      * Wait for enough results for settings to be applied
3845      *
3846      * @param resultListener The capture listener to get capture result back.
3847      * @param numResultWaitForUnknownLatency Number of frame to wait if camera device latency is
3848      *                                       unknown.
3849      * @param staticInfo corresponding camera device static metadata.
3850      * @param timeout wait timeout in ms.
3851      */
3852     public static void waitForSettingsApplied(SimpleCaptureCallback resultListener,
3853             int numResultWaitForUnknownLatency, StaticMetadata staticInfo, int timeout) {
3854         int maxLatency = staticInfo.getSyncMaxLatency();
3855         if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) {
3856             maxLatency = numResultWaitForUnknownLatency;
3857         }
3858         // Wait for settings to take effect
3859         waitForNumResults(resultListener, maxLatency, timeout);
3860     }
3861 
3862     public static Range<Integer> getSuitableFpsRangeForDuration(String cameraId,
3863             long frameDuration, StaticMetadata staticInfo) {
3864         // Add 0.05 here so an FPS like 29.99 evaluates to 30
3865         int minBurstFps = (int) Math.floor(1e9 / frameDuration + 0.05f);
3866         boolean foundConstantMaxYUVRange = false;
3867         boolean foundYUVStreamingRange = false;
3868         boolean isExternalCamera = staticInfo.isExternalCamera();
3869         boolean isNIR = staticInfo.isNIRColorFilter();
3870 
3871         // Find a suitable target FPS range - as high as possible while covering the max YUV rate
3872         // Also verify that there's a good preview rate as well
3873         List<Range<Integer> > fpsRanges = Arrays.asList(
3874                 staticInfo.getAeAvailableTargetFpsRangesChecked());
3875         Range<Integer> targetRange = null;
3876         for (Range<Integer> fpsRange : fpsRanges) {
3877             if (fpsRange.getLower() == minBurstFps && fpsRange.getUpper() == minBurstFps) {
3878                 foundConstantMaxYUVRange = true;
3879                 targetRange = fpsRange;
3880             } else if (isExternalCamera && fpsRange.getUpper() == minBurstFps) {
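                     // External cameras may not advertise a fixed (fps, fps) range; accept any
                     // range whose upper bound matches the burst rate.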
3881                 targetRange = fpsRange;
3882             }
3883             if (fpsRange.getLower() <= 15 && fpsRange.getUpper() == minBurstFps) {
3884                 foundYUVStreamingRange = true;
3885             }
3886 
3887         }
3888 
3889         if (!isExternalCamera) {
3890             assertTrue(String.format("Cam %s: Target FPS range of (%d, %d) must be supported",
3891                     cameraId, minBurstFps, minBurstFps), foundConstantMaxYUVRange);
3892         }
3893 
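             // Cameras with a NIR color filter are exempt from the low-FPS streaming range check below.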
3894         if (!isNIR) {
3895             assertTrue(String.format(
3896                     "Cam %s: Target FPS range of (x, %d) where x <= 15 must be supported",
3897                     cameraId, minBurstFps), foundYUVStreamingRange);
3898         }
3899         return targetRange;
3900     }
3901     /**
3902      * Get the candidate supported zoom ratios for testing
3903      *
3904      * <p>
3905      * This function returns the boundary values of the supported zoom ratio range in addition to the 1.0x
3906      * zoom ratio.
3907      * </p>
3908      */
3909     public static List<Float> getCandidateZoomRatios(StaticMetadata staticInfo) {
3910         List<Float> zoomRatios = new ArrayList<Float>();
3911         Range<Float> zoomRatioRange = staticInfo.getZoomRatioRangeChecked();
3912         zoomRatios.add(zoomRatioRange.getLower());
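             // Add 1.0x only when it is meaningfully different from both range boundaries.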
3913         if (zoomRatioRange.contains(1.0f) &&
3914                 1.0f - zoomRatioRange.getLower() > ZOOM_RATIO_THRESHOLD &&
3915                 zoomRatioRange.getUpper() - 1.0f > ZOOM_RATIO_THRESHOLD) {
3916             zoomRatios.add(1.0f);
3917         }
3918         zoomRatios.add(zoomRatioRange.getUpper());
3919 
3920         return zoomRatios;
3921     }
3922 
3923     /**
3924      * Check whether a camera Id is a primary rear facing camera
3925      */
3926     public static boolean isPrimaryRearFacingCamera(CameraManager manager, String cameraId)
3927             throws Exception {
3928         return isPrimaryCamera(manager, cameraId, CameraCharacteristics.LENS_FACING_BACK);
3929     }
3930 
3931     /**
3932      * Check whether a camera Id is a primary front facing camera
3933      */
3934     public static boolean isPrimaryFrontFacingCamera(CameraManager manager, String cameraId)
3935             throws Exception {
3936         return isPrimaryCamera(manager, cameraId, CameraCharacteristics.LENS_FACING_FRONT);
3937     }
3938 
3939     private static boolean isPrimaryCamera(CameraManager manager, String cameraId,
3940             Integer lensFacing) throws Exception {
3941         CameraCharacteristics characteristics;
3942         Integer facing;
3943 
3944         String [] ids = manager.getCameraIdList();
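             // The first camera in the ID list with the requested lens facing is treated as primary.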
3945         for (String id : ids) {
3946             characteristics = manager.getCameraCharacteristics(id);
3947             facing = characteristics.get(CameraCharacteristics.LENS_FACING);
3948             if (lensFacing.equals(facing)) {
3949                 if (cameraId.equals(id)) {
3950                     return true;
3951                 } else {
3952                     return false;
3953                 }
3954             }
3955         }
3956         return false;
3957     }
3958 }
3959