1 /*
2  * Copyright 2013 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.cts;
18 
19 import static android.hardware.camera2.cts.CameraTestUtils.CAMERA_IDLE_TIMEOUT_MS;
20 import static android.hardware.camera2.cts.CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS;
21 import static android.hardware.camera2.cts.CameraTestUtils.SESSION_READY_TIMEOUT_MS;
22 import static android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
23 import static android.hardware.camera2.cts.CameraTestUtils.SimpleImageReaderListener;
24 import static android.hardware.camera2.cts.CameraTestUtils.dumpFile;
25 import static android.hardware.camera2.cts.CameraTestUtils.getUnavailablePhysicalCameras;
26 import static android.hardware.camera2.cts.CameraTestUtils.getValueNotNull;
27 
28 import static com.google.common.truth.Truth.assertWithMessage;
29 
30 import static junit.framework.Assert.assertEquals;
31 import static junit.framework.Assert.assertFalse;
32 import static junit.framework.Assert.assertNotNull;
33 import static junit.framework.Assert.assertTrue;
34 import static junit.framework.Assert.fail;
35 
36 import android.graphics.Bitmap;
37 import android.graphics.BitmapFactory;
38 import android.graphics.BitmapRegionDecoder;
39 import android.graphics.Canvas;
40 import android.graphics.Color;
41 import android.graphics.ColorSpace;
42 import android.graphics.ImageFormat;
43 import android.graphics.Matrix;
44 import android.graphics.PixelFormat;
45 import android.graphics.Rect;
46 import android.graphics.RectF;
47 import android.hardware.DataSpace;
48 import android.hardware.HardwareBuffer;
49 import android.hardware.camera2.CameraCharacteristics;
50 import android.hardware.camera2.CameraDevice;
51 import android.hardware.camera2.CameraManager;
52 import android.hardware.camera2.CameraMetadata;
53 import android.hardware.camera2.CaptureRequest;
54 import android.hardware.camera2.CaptureResult;
55 import android.hardware.camera2.cts.CameraTestUtils.ImageDropperListener;
56 import android.hardware.camera2.cts.helpers.StaticMetadata;
57 import android.hardware.camera2.cts.rs.BitmapUtils;
58 import android.hardware.camera2.cts.testcases.Camera2AndroidTestCase;
59 import android.hardware.camera2.params.DynamicRangeProfiles;
60 import android.hardware.camera2.params.OutputConfiguration;
61 import android.hardware.camera2.params.StreamConfigurationMap;
62 import android.media.Image;
63 import android.media.Image.Plane;
64 import android.media.ImageReader;
65 import android.media.ImageWriter;
66 import android.os.Build;
67 import android.os.ConditionVariable;
68 import android.os.SystemClock;
69 import android.os.SystemProperties;
70 import android.platform.test.annotations.RequiresFlagsEnabled;
71 import android.platform.test.flag.junit.CheckFlagsRule;
72 import android.platform.test.flag.junit.DeviceFlagsValueProvider;
73 import android.util.Log;
74 import android.util.Pair;
75 import android.util.Size;
76 import android.view.Surface;
77 
78 import com.android.compatibility.common.util.PropertyUtil;
79 import com.android.ex.camera2.blocking.BlockingSessionCallback;
80 import com.android.internal.camera.flags.Flags;
81 
82 import org.junit.Rule;
83 import org.junit.Test;
84 import org.junit.runner.RunWith;
85 import org.junit.runners.Parameterized;
86 
87 import java.nio.ByteBuffer;
88 import java.util.ArrayList;
89 import java.util.Arrays;
90 import java.util.List;
91 import java.util.Set;
92 import java.util.concurrent.TimeUnit;
93 
94 /**
95  * <p>Basic test for ImageReader APIs. It uses CameraDevice as the producer; the camera
96  * sends data to the surface provided by the ImageReader. The image formats below
97  * are tested:</p>
98  *
99  * <p>YUV_420_888: flexible YUV420, a mandatory format for camera. </p>
100  * <p>JPEG: used for JPEG still capture, also a mandatory format. </p>
101  * <p>Some invalid access tests. </p>
102  * <p>TODO: Add more format tests? </p>
103  */
104 @RunWith(Parameterized.class)
105 public class ImageReaderTest extends Camera2AndroidTestCase {
106     private static final String TAG = "ImageReaderTest";
107     private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
108     private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
109 
110     // Number of frames (for streaming requests) to be verified.
111     private static final int NUM_FRAME_VERIFIED = 2;
112     // Number of frames (for streaming requests) to be verified with long processing time.
113     private static final int NUM_LONG_PROCESS_TIME_FRAME_VERIFIED = 10;
114     // The time to hold each image to simulate long processing time.
115     private static final int LONG_PROCESS_TIME_MS = 300;
116     // Max number of images that can be accessed simultaneously from an ImageReader.
117     private static final int MAX_NUM_IMAGES = 5;
118     // Max difference allowed between YUV and JPEG patches. This tolerance is intentionally very
119     // generous to avoid false positives due to punch/saturation operations vendors apply to the
120     // JPEG outputs.
121     private static final double IMAGE_DIFFERENCE_TOLERANCE = 40;
122     // Legacy level devices need an even larger tolerance because jpeg and yuv are not captured
123     // from the same frame in legacy mode.
124     private static final double IMAGE_DIFFERENCE_TOLERANCE_LEGACY = 60;
125 
126     private SimpleImageListener mListener;
127 
128     @Override
129     public void setUp() throws Exception {
130         super.setUp();
131     }
132 
133     @Override
134     public void tearDown() throws Exception {
135         super.tearDown();
136     }
137 
138     @Rule
139     public final CheckFlagsRule mCheckFlagsRule =
140             DeviceFlagsValueProvider.createCheckFlagsRule();
141 
142     @Test
143     public void testFlexibleYuv() throws Exception {
144         for (String id : getCameraIdsUnderTest()) {
145             try {
146                 Log.i(TAG, "Testing Camera " + id);
147                 openDevice(id);
148                 BufferFormatTestParam params = new BufferFormatTestParam(
149                         ImageFormat.YUV_420_888, /*repeating*/true);
150                 bufferFormatTestByCamera(params);
151             } finally {
152                 closeDevice(id);
153             }
154         }
155     }
156 
157     @Test
158     public void testDepth16() throws Exception {
159         for (String id : getCameraIdsUnderTest()) {
160             try {
161                 Log.i(TAG, "Testing Camera " + id);
162                 openDevice(id);
163                 BufferFormatTestParam params = new BufferFormatTestParam(
164                         ImageFormat.DEPTH16, /*repeating*/true);
165                 bufferFormatTestByCamera(params);
166             } finally {
167                 closeDevice(id);
168             }
169         }
170     }
171 
172     @Test
173     public void testDepthPointCloud() throws Exception {
174         for (String id : getCameraIdsUnderTest()) {
175             try {
176                 Log.i(TAG, "Testing Camera " + id);
177                 openDevice(id);
178                 BufferFormatTestParam params = new BufferFormatTestParam(
179                         ImageFormat.DEPTH_POINT_CLOUD, /*repeating*/true);
180                 bufferFormatTestByCamera(params);
181             } finally {
182                 closeDevice(id);
183             }
184         }
185     }
186 
187     @Test
188     public void testDynamicDepth() throws Exception {
189         for (String id : getCameraIdsUnderTest()) {
190             try {
191                 openDevice(id);
192                 BufferFormatTestParam params = new BufferFormatTestParam(
193                         ImageFormat.DEPTH_JPEG, /*repeating*/true);
194                 params.mCheckSession = true;
195                 bufferFormatTestByCamera(params);
196             } finally {
197                 closeDevice(id);
198             }
199         }
200     }
201 
202     @Test
203     public void testY8() throws Exception {
204         for (String id : getCameraIdsUnderTest()) {
205             try {
206                 Log.i(TAG, "Testing Camera " + id);
207                 openDevice(id);
208                 BufferFormatTestParam params = new BufferFormatTestParam(
209                         ImageFormat.Y8, /*repeating*/true);
210                 bufferFormatTestByCamera(params);
211             } finally {
212                 closeDevice(id);
213             }
214         }
215     }
216 
217     @Test
218     public void testJpeg() throws Exception {
219         for (String id : getCameraIdsUnderTest()) {
220             try {
221                 Log.v(TAG, "Testing jpeg capture for Camera " + id);
222                 openDevice(id);
223                 BufferFormatTestParam params = new BufferFormatTestParam(
224                         ImageFormat.JPEG, /*repeating*/false);
225                 bufferFormatTestByCamera(params);
226             } finally {
227                 closeDevice(id);
228             }
229         }
230     }
231 
232     @Test
233     public void testRaw() throws Exception {
234         for (String id : getCameraIdsUnderTest()) {
235             try {
236                 Log.v(TAG, "Testing raw capture for camera " + id);
237                 openDevice(id);
238                 BufferFormatTestParam params = new BufferFormatTestParam(
239                         ImageFormat.RAW_SENSOR, /*repeating*/false);
240                 bufferFormatTestByCamera(params);
241             } finally {
242                 closeDevice(id);
243             }
244         }
245     }
246 
247     @Test
248     public void testRawPrivate() throws Exception {
249         for (String id : getCameraIdsUnderTest()) {
250             try {
251                 Log.v(TAG, "Testing raw capture for camera " + id);
252                 openDevice(id);
253                 BufferFormatTestParam params = new BufferFormatTestParam(
254                         ImageFormat.RAW_PRIVATE, /*repeating*/false);
255                 bufferFormatTestByCamera(params);
256             } finally {
257                 closeDevice(id);
258             }
259         }
260     }
261 
262     @Test
263     public void testP010() throws Exception {
264         for (String id : getCameraIdsUnderTest()) {
265             try {
266                 Log.v(TAG, "Testing YUV P010 capture for Camera " + id);
267                 openDevice(id);
268                 if (!mStaticInfo.isCapabilitySupported(CameraCharacteristics.
269                             REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT)) {
270                     continue;
271                 }
272                 Set<Long> availableProfiles =
273                     mStaticInfo.getAvailableDynamicRangeProfilesChecked();
274                 assertFalse("Absent dynamic range profiles", availableProfiles.isEmpty());
275                 assertTrue("HLG10 not present in the available dynamic range profiles",
276                         availableProfiles.contains(DynamicRangeProfiles.HLG10));
277 
278                 BufferFormatTestParam params = new BufferFormatTestParam(
279                         ImageFormat.YCBCR_P010, /*repeating*/false);
280                 params.mDynamicRangeProfile = DynamicRangeProfiles.HLG10;
281                 bufferFormatTestByCamera(params);
282             } finally {
283                 closeDevice(id);
284             }
285         }
286     }
287 
288     @Test
289     @RequiresFlagsEnabled(android.media.codec.Flags.FLAG_P210_FORMAT_SUPPORT)
290     public void testP210() throws Exception {
291         for (String id : getCameraIdsUnderTest()) {
292             try {
293                 Log.v(TAG, "Testing YUV P210 capture for Camera " + id);
294                 openDevice(id);
295                 if (!mStaticInfo.isCapabilitySupported(CameraCharacteristics
296                             .REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT)) {
297                     Log.v(TAG, "No 10-bit output support for Camera " + id);
298                     continue;
299                 }
300                 if (mStaticInfo.getP210OutputSizesChecked().length == 0) {
301                     Log.v(TAG, "No YUV P210 output support for Camera " + id);
302                     continue;
303                 }
304                 Set<Long> availableProfiles =
305                         mStaticInfo.getAvailableDynamicRangeProfilesChecked();
306                 assertFalse("Absent dynamic range profiles", availableProfiles.isEmpty());
307                 assertTrue("HLG10 not present in the available dynamic range profiles",
308                         availableProfiles.contains(DynamicRangeProfiles.HLG10));
309 
310                 BufferFormatTestParam params = new BufferFormatTestParam(
311                         ImageFormat.YCBCR_P210, /*repeating*/false);
312                 params.mDynamicRangeProfile = DynamicRangeProfiles.HLG10;
313                 bufferFormatTestByCamera(params);
314             } finally {
315                 closeDevice(id);
316             }
317         }
318     }
319 
320     @Test
321     public void testDisplayP3Yuv() throws Exception {
322         for (String id : getCameraIdsUnderTest()) {
323             try {
324                 if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
325                             .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
326                     continue;
327                 }
328                 Set<ColorSpace.Named> availableColorSpaces =
329                         mAllStaticInfo.get(id).getAvailableColorSpacesChecked(
330                                 ImageFormat.YUV_420_888);
331 
332                 if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
333                     continue;
334                 }
335 
336                 openDevice(id);
337                 Log.v(TAG, "Testing Display P3 Yuv capture for Camera " + id);
338                 BufferFormatTestParam params = new BufferFormatTestParam(
339                         ImageFormat.YUV_420_888, /*repeating*/false);
340                 params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
341                 params.mUseColorSpace = true;
342                 bufferFormatTestByCamera(params);
343             } finally {
344                 closeDevice(id);
345             }
346         }
347     }
348 
349     @Test
350     public void testDisplayP3YuvRepeating() throws Exception {
351         for (String id : getCameraIdsUnderTest()) {
352             try {
353                 if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
354                             .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
355                     continue;
356                 }
357                 Set<ColorSpace.Named> availableColorSpaces =
358                         mAllStaticInfo.get(id).getAvailableColorSpacesChecked(
359                                 ImageFormat.YUV_420_888);
360 
361                 if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
362                     continue;
363                 }
364 
365                 openDevice(id);
366                 Log.v(TAG, "Testing repeating Display P3 Yuv capture for Camera " + id);
367                 BufferFormatTestParam params = new BufferFormatTestParam(
368                         ImageFormat.YUV_420_888, /*repeating*/true);
369                 params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
370                 params.mUseColorSpace = true;
371                 bufferFormatTestByCamera(params);
372             } finally {
373                 closeDevice(id);
374             }
375         }
376     }
377 
378     @Test
379     public void testDisplayP3Heic() throws Exception {
380         for (String id : getCameraIdsUnderTest()) {
381             try {
382                 if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
383                             .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
384                     continue;
385                 }
386                 Set<ColorSpace.Named> availableColorSpaces =
387                         mAllStaticInfo.get(id).getAvailableColorSpacesChecked(ImageFormat.HEIC);
388 
389                 if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
390                     continue;
391                 }
392 
393                 openDevice(id);
394                 Log.v(TAG, "Testing Display P3 HEIC capture for Camera " + id);
395                 BufferFormatTestParam params = new BufferFormatTestParam(
396                         ImageFormat.HEIC, /*repeating*/false);
397                 params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
398                 params.mUseColorSpace = true;
399                 bufferFormatTestByCamera(params);
400             } finally {
401                 closeDevice(id);
402             }
403         }
404     }
405 
406     @Test
407     public void testDisplayP3HeicRepeating() throws Exception {
408         for (String id : getCameraIdsUnderTest()) {
409             try {
410                 if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
411                             .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
412                     continue;
413                 }
414                 Set<ColorSpace.Named> availableColorSpaces =
415                         mAllStaticInfo.get(id).getAvailableColorSpacesChecked(ImageFormat.HEIC);
416 
417                 if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
418                     continue;
419                 }
420 
421                 openDevice(id);
422                 Log.v(TAG, "Testing repeating Display P3 HEIC capture for Camera " + id);
423                 BufferFormatTestParam params = new BufferFormatTestParam(
424                         ImageFormat.HEIC, /*repeating*/true);
425                 params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
426                 params.mUseColorSpace = true;
427                 bufferFormatTestByCamera(params);
428             } finally {
429                 closeDevice(id);
430             }
431         }
432     }
433 
434     @Test
435     public void testDisplayP3Jpeg() throws Exception {
436         for (String id : getCameraIdsUnderTest()) {
437             try {
438                 if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
439                             .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
440                     continue;
441                 }
442                 Set<ColorSpace.Named> availableColorSpaces =
443                         mAllStaticInfo.get(id).getAvailableColorSpacesChecked(ImageFormat.JPEG);
444 
445                 if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
446                     continue;
447                 }
448 
449                 openDevice(id);
450                 Log.v(TAG, "Testing Display P3 JPEG capture for Camera " + id);
451                 BufferFormatTestParam params = new BufferFormatTestParam(
452                         ImageFormat.JPEG, /*repeating*/false);
453                 params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
454                 params.mUseColorSpace = true;
455                 bufferFormatTestByCamera(params);
456             } finally {
457                 closeDevice(id);
458             }
459         }
460     }
461 
462     @Test
463     public void testDisplayP3JpegRepeating() throws Exception {
464         for (String id : getCameraIdsUnderTest()) {
465             try {
466                 if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
467                             .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
468                     continue;
469                 }
470                 Set<ColorSpace.Named> availableColorSpaces =
471                         mAllStaticInfo.get(id).getAvailableColorSpacesChecked(ImageFormat.JPEG);
472 
473                 if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
474                     continue;
475                 }
476 
477                 openDevice(id);
478                 Log.v(TAG, "Testing repeating Display P3 JPEG capture for Camera " + id);
479                 BufferFormatTestParam params = new BufferFormatTestParam(
480                         ImageFormat.JPEG, /*repeating*/true);
481                 params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
482                 params.mUseColorSpace = true;
483                 bufferFormatTestByCamera(params);
484             } finally {
485                 closeDevice(id);
486             }
487         }
488     }
489 
490     @Test
491     public void testSRGBJpeg() throws Exception {
492         for (String id : getCameraIdsUnderTest()) {
493             try {
494                 if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
495                             .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
496                     continue;
497                 }
498                 Set<ColorSpace.Named> availableColorSpaces =
499                         mAllStaticInfo.get(id).getAvailableColorSpacesChecked(ImageFormat.JPEG);
500 
501                 if (!availableColorSpaces.contains(ColorSpace.Named.SRGB)) {
502                     continue;
503                 }
504 
505                 openDevice(id);
506                 Log.v(TAG, "Testing sRGB JPEG capture for Camera " + id);
507                 BufferFormatTestParam params = new BufferFormatTestParam(
508                         ImageFormat.JPEG, /*repeating*/false);
509                 params.mColorSpace = ColorSpace.Named.SRGB;
510                 params.mUseColorSpace = true;
511                 bufferFormatTestByCamera(params);
512             } finally {
513                 closeDevice(id);
514             }
515         }
516     }
517 
518     @Test
519     public void testSRGBJpegRepeating() throws Exception {
520         for (String id : getCameraIdsUnderTest()) {
521             try {
522                 if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
523                             .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
524                     continue;
525                 }
526                 Set<ColorSpace.Named> availableColorSpaces =
527                         mAllStaticInfo.get(id).getAvailableColorSpacesChecked(ImageFormat.JPEG);
528 
529                 if (!availableColorSpaces.contains(ColorSpace.Named.SRGB)) {
530                     continue;
531                 }
532 
533                 openDevice(id);
534                 Log.v(TAG, "Testing repeating sRGB JPEG capture for Camera " + id);
535                 BufferFormatTestParam params = new BufferFormatTestParam(
536                         ImageFormat.JPEG, /*repeating*/true);
537                 params.mColorSpace = ColorSpace.Named.SRGB;
538                 params.mUseColorSpace = true;
539                 bufferFormatTestByCamera(params);
540             } finally {
541                 closeDevice(id);
542             }
543         }
544     }
545 
546     @Test
547     public void testJpegR() throws Exception {
548         for (String id : getCameraIdsUnderTest()) {
549             try {
550                 if (!mAllStaticInfo.get(id).isJpegRSupported()) {
551                     Log.i(TAG, "Camera " + id + " does not support Jpeg/R, skipping");
552                     continue;
553                 }
554                 Log.v(TAG, "Testing Jpeg/R capture for Camera " + id);
555 
556                 openDevice(id);
557                 BufferFormatTestParam params = new BufferFormatTestParam(
558                         ImageFormat.JPEG_R, /*repeating*/false);
559                 bufferFormatTestByCamera(params);
560             } finally {
561                 closeDevice(id);
562             }
563         }
564     }
565 
566     @Test
567     public void testJpegRDisplayP3() throws Exception {
568         for (String id : getCameraIdsUnderTest()) {
569             try {
570                 if (!mAllStaticInfo.get(id).isJpegRSupported()) {
571                     Log.i(TAG, "Camera " + id + " does not support Jpeg/R, skipping");
572                     continue;
573                 }
574 
575                 if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
576                         .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
577                     continue;
578                 }
579                 Set<ColorSpace.Named> availableColorSpaces =
580                         mAllStaticInfo.get(id).getAvailableColorSpacesChecked(
581                                 ImageFormat.JPEG_R);
582 
583                 if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
584                     continue;
585                 }
586                 openDevice(id);
587                 Log.v(TAG, "Testing Display P3 Jpeg/R capture for Camera " + id);
588                 BufferFormatTestParam params = new BufferFormatTestParam(
589                         ImageFormat.JPEG_R, /*repeating*/false);
590                 params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
591                 params.mUseColorSpace = true;
592                 if (mStaticInfo.isCapabilitySupported(CameraCharacteristics
593                         .REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT)) {
594                     params.mDynamicRangeProfile = DynamicRangeProfiles.HLG10;
595                 }
596                 bufferFormatTestByCamera(params);
597             } finally {
598                 closeDevice(id);
599             }
600         }
601     }
602 
603     @Test
604     public void testHeic() throws Exception {
605         for (String id : getCameraIdsUnderTest()) {
606             try {
607                 Log.v(TAG, "Testing heic capture for Camera " + id);
608                 openDevice(id);
609                 BufferFormatTestParam params = new BufferFormatTestParam(
610                         ImageFormat.HEIC, /*repeating*/false);
611                 bufferFormatTestByCamera(params);
612             } finally {
613                 closeDevice(id);
614             }
615         }
616     }
617 
618     @Test
619     @RequiresFlagsEnabled(Flags.FLAG_CAMERA_HEIF_GAINMAP)
620     public void testHeicUltraHdr() throws Exception {
621         for (String id : getCameraIdsUnderTest()) {
622             if (!mAllStaticInfo.get(id).isHeicUltraHdrSupported()) {
623                 Log.i(TAG, "Camera " + id + " does not support HEIC_ULTRAHDR, skipping");
624                 continue;
625             }
626 
627             try {
628                 Log.v(TAG, "Testing HEIC_ULTRAHDR capture for Camera " + id);
629                 openDevice(id);
630                 BufferFormatTestParam params = new BufferFormatTestParam(
631                         ImageFormat.HEIC_ULTRAHDR, /*repeating*/false);
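                // HEIC_ULTRAHDR output is carried as a BLOB hardware buffer with the
                // DATASPACE_HEIF_ULTRAHDR data space.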
632                 params.mUseDataSpace = true;
633                 params.mDataSpace = DataSpace.DATASPACE_HEIF_ULTRAHDR;
634                 params.mHardwareBufferFormat = HardwareBuffer.BLOB;
635                 bufferFormatTestByCamera(params);
636             } finally {
637                 closeDevice(id);
638             }
639         }
640     }
641 
642     @Test
643     public void testRepeatingJpeg() throws Exception {
644         for (String id : getCameraIdsUnderTest()) {
645             try {
646                 Log.v(TAG, "Testing repeating jpeg capture for Camera " + id);
647                 openDevice(id);
648                 BufferFormatTestParam params = new BufferFormatTestParam(
649                         ImageFormat.JPEG, /*repeating*/true);
650                 bufferFormatTestByCamera(params);
651             } finally {
652                 closeDevice(id);
653             }
654         }
655     }
656 
657     @Test
658     public void testRepeatingRaw() throws Exception {
659         for (String id : getCameraIdsUnderTest()) {
660             try {
661                 Log.v(TAG, "Testing repeating raw capture for camera " + id);
662                 openDevice(id);
663                 BufferFormatTestParam params = new BufferFormatTestParam(
664                         ImageFormat.RAW_SENSOR, /*repeating*/true);
665                 bufferFormatTestByCamera(params);
666             } finally {
667                 closeDevice(id);
668             }
669         }
670     }
671 
672     @Test
673     public void testRepeatingRawPrivate() throws Exception {
674         for (String id : getCameraIdsUnderTest()) {
675             try {
676                 Log.v(TAG, "Testing repeating raw capture for camera " + id);
677                 openDevice(id);
678                 BufferFormatTestParam params = new BufferFormatTestParam(
679                         ImageFormat.RAW_PRIVATE, /*repeating*/true);
680                 bufferFormatTestByCamera(params);
681             } finally {
682                 closeDevice(id);
683             }
684         }
685     }
686 
687     @Test
688     public void testRepeatingHeic() throws Exception {
689         for (String id : getCameraIdsUnderTest()) {
690             try {
691                 Log.v(TAG, "Testing repeating heic capture for Camera " + id);
692                 openDevice(id);
693                 BufferFormatTestParam params = new BufferFormatTestParam(
694                         ImageFormat.HEIC, /*repeating*/true);
695                 bufferFormatTestByCamera(params);
696             } finally {
697                 closeDevice(id);
698             }
699         }
700     }
701 
702     @Test
703     public void testFlexibleYuvWithTimestampBase() throws Exception {
704         for (String id : getCameraIdsUnderTest()) {
705             try {
706                 Log.i(TAG, "Testing Camera " + id);
707                 openDevice(id);
708 
709                 BufferFormatTestParam params = new BufferFormatTestParam(
710                         ImageFormat.YUV_420_888, /*repeating*/true);
711                 params.mValidateImageData = false;
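                // Run the same streaming YUV test once for each supported timestamp base.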
712                 int[] timeBases = {OutputConfiguration.TIMESTAMP_BASE_SENSOR,
713                         OutputConfiguration.TIMESTAMP_BASE_MONOTONIC,
714                         OutputConfiguration.TIMESTAMP_BASE_REALTIME,
715                         OutputConfiguration.TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED};
716                 for (int timeBase : timeBases) {
717                     params.mTimestampBase = timeBase;
718                     bufferFormatTestByCamera(params);
719                 }
720             } finally {
721                 closeDevice(id);
722             }
723         }
724     }
725 
726     @Test
727     public void testLongProcessingRepeatingRaw() throws Exception {
728         for (String id : getCameraIdsUnderTest()) {
729             try {
730                 Log.v(TAG, "Testing long processing on repeating raw for camera " + id);
731 
732                 if (!mAllStaticInfo.get(id).isCapabilitySupported(
733                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
734                     continue;
735                 }
736                 openDevice(id);
737 
738                 bufferFormatLongProcessingTimeTestByCamera(ImageFormat.RAW_SENSOR);
739             } finally {
740                 closeDevice(id);
741             }
742         }
743     }
744 
745     @Test
746     public void testLongProcessingRepeatingFlexibleYuv() throws Exception {
747         for (String id : getCameraIdsUnderTest()) {
748             try {
749                 Log.v(TAG, "Testing long processing on repeating YUV for camera " + id);
750 
751                 if (!mAllStaticInfo.get(id).isCapabilitySupported(
752                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
753                     continue;
754                 }
755 
756                 openDevice(id);
757                 bufferFormatLongProcessingTimeTestByCamera(ImageFormat.YUV_420_888);
758             } finally {
759                 closeDevice(id);
760             }
761         }
762     }
763 
764     /**
765      * Test invalid access of an image after it has been closed; further access
766      * of the image should throw an IllegalStateException. The basic assumption of
767      * this test is that the ImageReader always provides a direct byte buffer, which is
768      * always true for the camera case. If the produced image byte buffer is not a direct
769      * byte buffer, there is no guarantee that this invalid access will throw an ISE.
770      */
771     @Test
772     public void testInvalidAccessTest() throws Exception {
773         // Test byte buffer access after an image is released; it should throw an ISE.
774         for (String id : getCameraIdsUnderTest()) {
775             try {
776                 Log.v(TAG, "Testing invalid image access for Camera " + id);
777                 openDevice(id);
778                 invalidAccessTestAfterClose();
779             } finally {
780                 closeDevice(id);
781                 closeDefaultImageReader();
782             }
783         }
784     }
785 
786     /**
787      * Test capture of two image streams (YUV_420_888 and JPEG) using ImageReader.
788      *
789      * <p>Both stream formats are mandatory for the Camera2 API.</p>
790      */
791     @Test
792     public void testYuvAndJpeg() throws Exception {
793         for (String id : getCameraIdsUnderTest()) {
794             try {
795                 Log.v(TAG, "YUV and JPEG testing for camera " + id);
796                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
797                     Log.i(TAG, "Camera " + id +
798                             " does not support color outputs, skipping");
799                     continue;
800                 }
801                 openDevice(id);
802                 bufferFormatWithYuvTestByCamera(ImageFormat.JPEG);
803             } finally {
804                 closeDevice(id);
805             }
806         }
807     }
808 
809     /**
810      * Test capture of two image streams (YUV_420_888 and JPEG) using ImageReader with the
811      * ImageReader factory method that has a usage flag argument.
812      *
813      * <p>Both stream formats are mandatory for the Camera2 API.</p>
814      */
815     @Test
816     public void testYuvAndJpegWithUsageFlag() throws Exception {
817         for (String id : getCameraIdsUnderTest()) {
818             try {
819                 Log.v(TAG, "YUV and JPEG testing for camera " + id);
820                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
821                     Log.i(TAG, "Camera " + id +
822                             " does not support color outputs, skipping");
823                     continue;
824                 }
825                 openDevice(id);
826                 bufferFormatWithYuvTestByCamera(ImageFormat.JPEG, true);
827             } finally {
828                 closeDevice(id);
829             }
830         }
831     }
832 
833     @Test
834     public void testImageReaderBuilderSetHardwareBufferFormatAndDataSpace() throws Exception {
835         long usage = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE | HardwareBuffer.USAGE_GPU_COLOR_OUTPUT;
836         try (
837             ImageReader reader = new ImageReader
838                 .Builder(20, 45)
839                 .setMaxImages(2)
840                 .setDefaultHardwareBufferFormat(HardwareBuffer.RGBA_8888)
841                 .setDefaultDataSpace(DataSpace.DATASPACE_BT709)
842                 .setUsage(usage)
843                 .build();
844             ImageWriter writer = ImageWriter.newInstance(reader.getSurface(), 1);
845             Image outputImage = writer.dequeueInputImage()
846         ) {
847             assertEquals(2, reader.getMaxImages());
848             assertEquals(usage, reader.getUsage());
849             assertEquals(HardwareBuffer.RGBA_8888, reader.getHardwareBufferFormat());
850 
851             assertEquals(20, outputImage.getWidth());
852             assertEquals(45, outputImage.getHeight());
853             assertEquals(HardwareBuffer.RGBA_8888, outputImage.getFormat());
854         }
855     }
856 
857     @Test
858     public void testImageReaderBuilderWithBLOBAndHEIF() throws Exception {
859         long usage = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE | HardwareBuffer.USAGE_GPU_COLOR_OUTPUT;
860         try (
861             ImageReader reader = new ImageReader
862                 .Builder(20, 45)
863                 .setMaxImages(2)
864                 .setDefaultHardwareBufferFormat(HardwareBuffer.BLOB)
865                 .setDefaultDataSpace(DataSpace.DATASPACE_HEIF)
866                 .setUsage(usage)
867                 .build();
868             ImageWriter writer = new ImageWriter.Builder(reader.getSurface()).build();
869         ) {
870             assertEquals(2, reader.getMaxImages());
871             assertEquals(usage, reader.getUsage());
872             assertEquals(HardwareBuffer.BLOB, reader.getHardwareBufferFormat());
873             assertEquals(DataSpace.DATASPACE_HEIF, reader.getDataSpace());
874             // writer should have same dataspace/hardwarebuffer format as reader.
875             assertEquals(HardwareBuffer.BLOB, writer.getHardwareBufferFormat());
876             assertEquals(DataSpace.DATASPACE_HEIF, writer.getDataSpace());
877             // HEIC is the combination of HardwareBuffer.BLOB and Dataspace.DATASPACE_HEIF
878             assertEquals(ImageFormat.HEIC, writer.getFormat());
879         }
880     }
881 
882     @Test
883     @RequiresFlagsEnabled(Flags.FLAG_CAMERA_HEIF_GAINMAP)
884     public void testImageReaderBuilderWithBLOBAndHeicUltraHdr() throws Exception {
885         long usage = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE | HardwareBuffer.USAGE_GPU_COLOR_OUTPUT;
886         try (
887                 ImageReader reader = new ImageReader
888                         .Builder(20, 45)
889                         .setMaxImages(2)
890                         .setDefaultHardwareBufferFormat(HardwareBuffer.BLOB)
891                         .setDefaultDataSpace(DataSpace.DATASPACE_HEIF_ULTRAHDR)
892                         .setUsage(usage)
893                         .build();
894                 ImageWriter writer = new ImageWriter.Builder(reader.getSurface()).build();
895         ) {
896             assertEquals(2, reader.getMaxImages());
897             assertEquals(usage, reader.getUsage());
898             assertEquals(HardwareBuffer.BLOB, reader.getHardwareBufferFormat());
899             assertEquals(DataSpace.DATASPACE_HEIF_ULTRAHDR, reader.getDataSpace());
900             // writer should have same dataspace/hardwarebuffer format as reader.
901             assertEquals(HardwareBuffer.BLOB, writer.getHardwareBufferFormat());
902             assertEquals(DataSpace.DATASPACE_HEIF_ULTRAHDR, writer.getDataSpace());
903             // HEIC_ULTRAHDR is the combination of HardwareBuffer.BLOB and
904             // Dataspace.DATASPACE_HEIF_ULTRAHDR
905             assertEquals(ImageFormat.HEIC_ULTRAHDR, writer.getFormat());
906         }
907     }
908 
909     @Test
910     public void testImageReaderBuilderWithBLOBAndJpegR() throws Exception {
911         long usage = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE | HardwareBuffer.USAGE_GPU_COLOR_OUTPUT;
912         try (
913                 ImageReader reader = new ImageReader
914                         .Builder(20, 45)
915                         .setMaxImages(2)
916                         .setDefaultHardwareBufferFormat(HardwareBuffer.BLOB)
917                         .setDefaultDataSpace(DataSpace.DATASPACE_JPEG_R)
918                         .setUsage(usage)
919                         .build();
920                 ImageWriter writer = new ImageWriter.Builder(reader.getSurface()).build();
921         ) {
922             assertEquals(2, reader.getMaxImages());
923             assertEquals(usage, reader.getUsage());
924             assertEquals(HardwareBuffer.BLOB, reader.getHardwareBufferFormat());
925             assertEquals(DataSpace.DATASPACE_JPEG_R, reader.getDataSpace());
926             // writer should have same dataspace/hardwarebuffer format as reader.
927             assertEquals(HardwareBuffer.BLOB, writer.getHardwareBufferFormat());
928             assertEquals(DataSpace.DATASPACE_JPEG_R, writer.getDataSpace());
929             // Jpeg/R is the combination of HardwareBuffer.BLOB and Dataspace.DATASPACE_JPEG_R
930             assertEquals(ImageFormat.JPEG_R, writer.getFormat());
931         }
932     }
933 
934     @Test
935     public void testImageReaderBuilderWithBLOBAndJFIF() throws Exception {
936         long usage = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE | HardwareBuffer.USAGE_GPU_COLOR_OUTPUT;
937         try (
938             ImageReader reader = new ImageReader
939                 .Builder(20, 45)
940                 .setMaxImages(2)
941                 .setDefaultHardwareBufferFormat(HardwareBuffer.BLOB)
942                 .setDefaultDataSpace(DataSpace.DATASPACE_JFIF)
943                 .setUsage(usage)
944                 .build();
945             ImageWriter writer = new ImageWriter.Builder(reader.getSurface()).build();
946         ) {
947             assertEquals(2, reader.getMaxImages());
948             assertEquals(usage, reader.getUsage());
949             assertEquals(HardwareBuffer.BLOB, reader.getHardwareBufferFormat());
950             assertEquals(DataSpace.DATASPACE_JFIF, reader.getDataSpace());
951             // writer should have same dataspace/hardwarebuffer format as reader.
952             assertEquals(HardwareBuffer.BLOB, writer.getHardwareBufferFormat());
953             assertEquals(DataSpace.DATASPACE_JFIF, writer.getDataSpace());
954             // JPEG is the combination of HardwareBuffer.BLOB and Dataspace.DATASPACE_JFIF
955             assertEquals(ImageFormat.JPEG, writer.getFormat());
956         }
957     }
958 
959     @Test
960     public void testImageReaderBuilderImageFormatOverride() throws Exception {
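        // The hardware buffer format and data space set below take precedence over the
        // ImageFormat.HEIC value passed to setImageFormat(), as the assertions verify.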
961         try (
962             ImageReader reader = new ImageReader
963                 .Builder(20, 45)
964                 .setImageFormat(ImageFormat.HEIC)
965                 .setDefaultHardwareBufferFormat(HardwareBuffer.RGB_888)
966                 .setDefaultDataSpace(DataSpace.DATASPACE_BT709)
967                 .build();
968             ImageWriter writer = ImageWriter.newInstance(reader.getSurface(), 1);
969             Image outputImage = writer.dequeueInputImage()
970         ) {
971             assertEquals(1, reader.getMaxImages());
972             assertEquals(HardwareBuffer.USAGE_CPU_READ_OFTEN, reader.getUsage());
973             assertEquals(HardwareBuffer.RGB_888, reader.getHardwareBufferFormat());
974             assertEquals(DataSpace.DATASPACE_BT709, reader.getDataSpace());
975 
976             assertEquals(20, outputImage.getWidth());
977             assertEquals(45, outputImage.getHeight());
978             assertEquals(HardwareBuffer.RGB_888, outputImage.getFormat());
979         }
980     }
981 
982     @Test
983     public void testImageReaderBuilderSetImageFormat() throws Exception {
984         try (
985             ImageReader reader = new ImageReader
986                 .Builder(20, 45)
987                 .setMaxImages(2)
988                 .setImageFormat(ImageFormat.YUV_420_888)
989                 .build();
990             ImageWriter writer = ImageWriter.newInstance(reader.getSurface(), 1);
991             Image outputImage = writer.dequeueInputImage()
992         ) {
993             assertEquals(2, reader.getMaxImages());
994             assertEquals(ImageFormat.YUV_420_888, reader.getImageFormat());
995             assertEquals(HardwareBuffer.USAGE_CPU_READ_OFTEN, reader.getUsage());
996             // ImageFormat.YUV_420_888 hal dataspace is DATASPACE_JFIF
997             assertEquals(DataSpace.DATASPACE_JFIF, reader.getDataSpace());
998 
999             // writer should retrieve all info from reader's surface
1000             assertEquals(DataSpace.DATASPACE_JFIF, writer.getDataSpace());
1001             assertEquals(HardwareBuffer.YCBCR_420_888, writer.getHardwareBufferFormat());
1002 
1003             assertEquals(20, outputImage.getWidth());
1004             assertEquals(45, outputImage.getHeight());
1005             assertEquals(ImageFormat.YUV_420_888, outputImage.getFormat());
1006         }
1007     }
1008 
1009     /**
1010      * Test capture of two image streams (YUV_420_888 and RAW_SENSOR) using ImageReader.
1011      *
1012      */
1013     @Test
1014     public void testImageReaderYuvAndRaw() throws Exception {
1015         for (String id : getCameraIdsUnderTest()) {
1016             try {
1017                 Log.v(TAG, "YUV and RAW testing for camera " + id);
1018                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
1019                     Log.i(TAG, "Camera " + id +
1020                             " does not support color outputs, skipping");
1021                     continue;
1022                 }
1023                 openDevice(id);
1024                 bufferFormatWithYuvTestByCamera(ImageFormat.RAW_SENSOR);
1025             } finally {
1026                 closeDevice(id);
1027             }
1028         }
1029     }
1030 
1031     /**
1032      * Test capture of two image streams (YUV_420_888 and PRIVATE) using ImageReader.
1033      */
1034     @Test
1035     public void testImageReaderYuvAndPrivate() throws Exception {
1036         for (String id : getCameraIdsUnderTest()) {
1037             try {
1038                 Log.v(TAG, "YUV and PRIVATE testing for camera " + id);
1039                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
1040                     Log.i(TAG, "Camera " + id
1041                             + " does not support color outputs, skipping");
1042                     continue;
1043                 }
1044                 openDevice(id);
1045                 // YUV PREVIEW + PRIVATE PREVIEW is a mandatory legacy stream combination.
1046                 bufferFormatWithYuvTestByCamera(ImageFormat.PRIVATE,
1047                         /*setUsageFlag*/false, /*useYuvSize*/true);
1048             } finally {
1049                 closeDevice(id);
1050             }
1051         }
1052     }
1053 
1054     /**
1055      * If the camera device advertises the SECURE_IMAGE_DATA capability, test
1056      * ImageFormat.PRIVATE + PROTECTED usage capture using ImageReader with the
1057      * ImageReader factory method that has a usage flag argument, using a custom usage flag.
1058      */
1059     @Test
1060     public void testImageReaderPrivateWithProtectedUsageFlag() throws Exception {
1061         Set<Pair<String, String>> unavailablePhysicalCameras = getUnavailablePhysicalCameras(
1062                 mCameraManager, mHandler);
1063         for (String id : getCameraIdsUnderTest()) {
1064             try {
1065                 Log.v(TAG, "Private format and protected usage testing for camera " + id);
1066                 List<String> testCameraIds = new ArrayList<>();
1067 
1068                 if (mAllStaticInfo.get(id).isCapabilitySupported(
1069                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA)) {
1070                     // Test the camera id without using physical camera
1071                     testCameraIds.add(null);
1072                 }
1073 
1074                 if (mAllStaticInfo.get(id).isLogicalMultiCamera()) {
1075                     Set<String> physicalIdsSet =
1076                             mAllStaticInfo.get(id).getCharacteristics().getPhysicalCameraIds();
1077                     for (String physicalId : physicalIdsSet) {
1078                         StaticMetadata phyInfo = mAllStaticInfo.get(physicalId);
1079                         boolean isUnavailable =
1080                                 unavailablePhysicalCameras.contains(new Pair<>(id, physicalId));
1081                         if (phyInfo.isCapabilitySupported(CameraCharacteristics
1082                                 .REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA)
1083                                 && !isUnavailable) {
1084                             testCameraIds.add(physicalId);
1085                         }
1086                     }
1087                 }
1088 
1089                 if (testCameraIds.isEmpty()) {
1090                     Log.i(TAG, "Camera " + id +
1091                             " does not support secure image data capability, skipping");
1092                     continue;
1093                 }
1094                 openDevice(id);
1095 
1096 
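                // Protected buffers are not CPU-accessible, so per-image data validation is
                // disabled for this stream.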
1097                 BufferFormatTestParam params = new BufferFormatTestParam(
1098                         ImageFormat.PRIVATE, /*repeating*/true);
1099                 params.mSetUsageFlag = true;
1100                 params.mUsageFlag = HardwareBuffer.USAGE_PROTECTED_CONTENT;
1101                 params.mRepeating = true;
1102                 params.mCheckSession = true;
1103                 params.mValidateImageData = false;
1104                 for (String testCameraId : testCameraIds) {
1105                     params.mPhysicalId = testCameraId;
1106                     bufferFormatTestByCamera(params);
1107                 }
1108             } finally {
1109                 closeDevice(id);
1110             }
1111         }
1112     }
1113 
1114     /**
1115      * Test capture of two image streams (YUV_420_888 and RAW_SENSOR) using ImageReader with
1116      * the ImageReader factory method that has a usage flag argument.
1117      *
1118      */
1119     @Test
1120     public void testImageReaderYuvAndRawWithUsageFlag() throws Exception {
1121         for (String id : getCameraIdsUnderTest()) {
1122             try {
1123                 Log.v(TAG, "YUV and RAW testing for camera " + id);
1124                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
1125                     Log.i(TAG, "Camera " + id +
1126                             " does not support color outputs, skipping");
1127                     continue;
1128                 }
1129                 openDevice(id);
1130                 bufferFormatWithYuvTestByCamera(ImageFormat.RAW_SENSOR, true);
1131             } finally {
1132                 closeDevice(id);
1133             }
1134         }
1135     }
1136 
1137     /**
1138      * Check that the center patches for YUV and JPEG outputs for the same frame match for each YUV
1139      * resolution and format supported.
1140      */
1141     @Test
1142     public void testAllOutputYUVResolutions() throws Exception {
1143         Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
1144                 BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
1145         for (String id : getCameraIdsUnderTest()) {
1146             try {
1147                 Log.v(TAG, "Testing all YUV image resolutions for camera " + id);
1148 
1149                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
1150                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
1151                     continue;
1152                 }
1153 
1154                 openDevice(id);
1155                 // Only LEGACY devices need warmup captures; skip warmup on all other devices.
1156                 int warmupCaptureNumber = (mStaticInfo.isHardwareLevelLegacy()) ?
1157                         MAX_NUM_IMAGES - 1 : 0;
1158 
1159                 // NV21 isn't supported by ImageReader.
1160                 final int[] YUVFormats = new int[] {ImageFormat.YUV_420_888, ImageFormat.YV12};
1161 
1162                 CameraCharacteristics.Key<StreamConfigurationMap> key =
1163                         CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP;
1164                 StreamConfigurationMap config = mStaticInfo.getValueFromKeyNonNull(key);
1165                 int[] supportedFormats = config.getOutputFormats();
1166                 List<Integer> supportedYUVFormats = new ArrayList<>();
1167                 for (int format : YUVFormats) {
1168                     if (CameraTestUtils.contains(supportedFormats, format)) {
1169                         supportedYUVFormats.add(format);
1170                     }
1171                 }
1172 
1173                 Size[] jpegSizes = mStaticInfo.getAvailableSizesForFormatChecked(ImageFormat.JPEG,
1174                         StaticMetadata.StreamDirection.Output);
1175                 assertFalse("JPEG output not supported for camera " + id +
1176                         ", at least one JPEG output is required.", jpegSizes.length == 0);
1177 
1178                 Size maxJpegSize = CameraTestUtils.getMaxSize(jpegSizes);
1179                 Size maxPreviewSize = mOrderedPreviewSizes.get(0);
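                // QCIF and FULL_HD are used below to identify size combinations that fall outside
                // the mandatory stream combinations and may legitimately fail to configure.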
1180                 Size QCIF = new Size(176, 144);
1181                 Size FULL_HD = new Size(1920, 1080);
1182                 for (int format : supportedYUVFormats) {
1183                     Size[] targetCaptureSizes =
1184                             mStaticInfo.getAvailableSizesForFormatChecked(format,
1185                             StaticMetadata.StreamDirection.Output);
1186 
1187                     for (Size captureSz : targetCaptureSizes) {
1188                         if (VERBOSE) {
1189                             Log.v(TAG, "Testing yuv size " + captureSz + " and jpeg size "
1190                                     + maxJpegSize + " for camera " + mCamera.getId());
1191                         }
1192 
1193                         ImageReader jpegReader = null;
1194                         ImageReader yuvReader = null;
1195                         try {
1196                             // Create YUV image reader
1197                             SimpleImageReaderListener yuvListener = new SimpleImageReaderListener();
1198                             yuvReader = createImageReader(captureSz, format, MAX_NUM_IMAGES,
1199                                     yuvListener);
1200                             Surface yuvSurface = yuvReader.getSurface();
1201 
1202                             // Create JPEG image reader
1203                             SimpleImageReaderListener jpegListener =
1204                                     new SimpleImageReaderListener();
1205                             jpegReader = createImageReader(maxJpegSize,
1206                                     ImageFormat.JPEG, MAX_NUM_IMAGES, jpegListener);
1207                             Surface jpegSurface = jpegReader.getSurface();
1208 
1209                             // Setup session
1210                             List<Surface> outputSurfaces = new ArrayList<Surface>();
1211                             outputSurfaces.add(yuvSurface);
1212                             outputSurfaces.add(jpegSurface);
1213                             createSession(outputSurfaces);
1214 
1215                             int state = mCameraSessionListener.getStateWaiter().waitForAnyOfStates(
1216                                         Arrays.asList(sessionStates),
1217                                         CameraTestUtils.SESSION_CONFIGURE_TIMEOUT_MS);
1218 
1219                             if (state == BlockingSessionCallback.SESSION_CONFIGURE_FAILED) {
1220                                 if (captureSz.getWidth() > maxPreviewSize.getWidth() ||
1221                                         captureSz.getHeight() > maxPreviewSize.getHeight()) {
1222                                     Log.v(TAG, "Skip testing {yuv:" + captureSz
1223                                             + " ,jpeg:" + maxJpegSize + "} for camera "
1224                                             + mCamera.getId() +
1225                                             " because full size jpeg + yuv larger than "
1226                                             + "max preview size (" + maxPreviewSize
1227                                             + ") is not supported");
1228                                     continue;
1229                                 } else if (captureSz.equals(QCIF) &&
1230                                         ((maxJpegSize.getWidth() > FULL_HD.getWidth()) ||
1231                                          (maxJpegSize.getHeight() > FULL_HD.getHeight()))) {
1232                                     Log.v(TAG, "Skip testing {yuv:" + captureSz
1233                                             + " ,jpeg:" + maxJpegSize + "} for camera "
1234                                             + mCamera.getId() +
1235                                             " because QCIF + >Full_HD size is not supported");
1236                                     continue;
1237                                 } else {
1238                                     fail("Camera " + mCamera.getId() +
1239                                             ":session configuration failed for {jpeg: " +
1240                                             maxJpegSize + ", yuv: " + captureSz + "}");
1241                                 }
1242                             }
1243 
1244                             // Warm up camera preview (mainly to give legacy devices time to do 3A).
1245                             CaptureRequest.Builder warmupRequest =
1246                                     mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1247                             assertNotNull("Failed to get CaptureRequest.Builder", warmupRequest);
1248                             warmupRequest.addTarget(yuvSurface);
1249                             SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1250 
1251                             for (int i = 0; i < warmupCaptureNumber; i++) {
1252                                 startCapture(warmupRequest.build(), /*repeating*/false,
1253                                         resultListener, mHandler);
1254                             }
1255                             for (int i = 0; i < warmupCaptureNumber; i++) {
1256                                 resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);
1257                                 Image image = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
1258                                 image.close();
1259                             }
1260 
1261                             // Capture image.
1262                             CaptureRequest.Builder mainRequest =
1263                                     mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1264                             for (Surface s : outputSurfaces) {
1265                                 mainRequest.addTarget(s);
1266                             }
1267 
1268                             startCapture(mainRequest.build(), /*repeating*/false, resultListener,
1269                                     mHandler);
1270 
1271                             // Verify capture result and images
1272                             resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);
1273 
1274                             Image yuvImage = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
1275                             Image jpegImage = jpegListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
1276 
1277                             // Validate captured images.
1278                             CameraTestUtils.validateImage(yuvImage, captureSz.getWidth(),
1279                                     captureSz.getHeight(), format, /*filePath*/null);
1280                             CameraTestUtils.validateImage(jpegImage, maxJpegSize.getWidth(),
1281                                     maxJpegSize.getHeight(), ImageFormat.JPEG, /*filePath*/null);
1282 
1283                             // Compare the image centers.
1284                             RectF jpegDimens = new RectF(0, 0, jpegImage.getWidth(),
1285                                     jpegImage.getHeight());
1286                             RectF yuvDimens = new RectF(0, 0, yuvImage.getWidth(),
1287                                     yuvImage.getHeight());
1288 
1289                             // Find scale difference between YUV and JPEG output
1290                             Matrix m = new Matrix();
1291                             m.setRectToRect(yuvDimens, jpegDimens, Matrix.ScaleToFit.START);
1292                             RectF scaledYuv = new RectF();
1293                             m.mapRect(scaledYuv, yuvDimens);
1294                             float scale = scaledYuv.width() / yuvDimens.width();
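                                 // ScaleToFit.START uses a single uniform scale factor (aspect
                                 // ratio preserved), so the width ratio above is the overall
                                 // YUV -> JPEG scale used to size the JPEG patch below.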
1295 
1296                             final int PATCH_DIMEN = 40; // pixels in YUV
1297 
1298                             // Find matching square patch of pixels in YUV and JPEG output
1299                             RectF tempPatch = new RectF(0, 0, PATCH_DIMEN, PATCH_DIMEN);
1300                             tempPatch.offset(yuvDimens.centerX() - tempPatch.centerX(),
1301                                     yuvDimens.centerY() - tempPatch.centerY());
1302                             Rect yuvPatch = new Rect();
1303                             tempPatch.roundOut(yuvPatch);
1304 
1305                             tempPatch.set(0, 0, PATCH_DIMEN * scale, PATCH_DIMEN * scale);
1306                             tempPatch.offset(jpegDimens.centerX() - tempPatch.centerX(),
1307                                     jpegDimens.centerY() - tempPatch.centerY());
1308                             Rect jpegPatch = new Rect();
1309                             tempPatch.roundOut(jpegPatch);
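                                 // Both patches are centered on their images and cover the same
                                 // region of the scene; the JPEG patch is 'scale' times larger so
                                 // it can be downscaled and compared pixel-for-pixel with the YUV
                                 // patch.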
1310 
1311                             // Decode center patches
1312                             int[] yuvColors = convertPixelYuvToRgba(yuvPatch.width(),
1313                                     yuvPatch.height(), yuvPatch.left, yuvPatch.top, yuvImage);
1314                             Bitmap yuvBmap = Bitmap.createBitmap(yuvColors, yuvPatch.width(),
1315                                     yuvPatch.height(), Bitmap.Config.ARGB_8888);
1316 
1317                             byte[] compressedJpegData = CameraTestUtils.getDataFromImage(jpegImage);
1318                             BitmapRegionDecoder decoder = BitmapRegionDecoder.newInstance(
1319                                     compressedJpegData, /*offset*/0, compressedJpegData.length,
1320                                     /*isShareable*/true);
1321                             BitmapFactory.Options opt = new BitmapFactory.Options();
1322                             opt.inPreferredConfig = Bitmap.Config.ARGB_8888;
1323                             Bitmap fullSizeJpegBmap = decoder.decodeRegion(jpegPatch, opt);
1324                             Bitmap jpegBmap = Bitmap.createScaledBitmap(fullSizeJpegBmap,
1325                                     yuvPatch.width(), yuvPatch.height(), /*filter*/true);
1326 
1327                             // Compare two patches using average of per-pixel differences
1328                             double difference = BitmapUtils.calcDifferenceMetric(yuvBmap, jpegBmap);
1329                             double tolerance = IMAGE_DIFFERENCE_TOLERANCE;
1330                             if (mStaticInfo.isHardwareLevelLegacy()) {
1331                                 tolerance = IMAGE_DIFFERENCE_TOLERANCE_LEGACY;
1332                             }
1333                             Log.i(TAG, "Difference for resolution " + captureSz + " is: " +
1334                                     difference);
1335                             if (difference > tolerance) {
1336                                 // Dump files if running in verbose mode
1337                                 if (DEBUG) {
1338                                     String jpegFileName = mDebugFileNameBase + "/" + captureSz +
1339                                             "_jpeg.jpg";
1340                                     dumpFile(jpegFileName, jpegBmap);
1341                                     String fullSizeJpegFileName = mDebugFileNameBase + "/" +
1342                                             captureSz + "_full_jpeg.jpg";
1343                                     dumpFile(fullSizeJpegFileName, compressedJpegData);
1344                                     String yuvFileName = mDebugFileNameBase + "/" + captureSz +
1345                                             "_yuv.jpg";
1346                                     dumpFile(yuvFileName, yuvBmap);
1347                                     String fullSizeYuvFileName = mDebugFileNameBase + "/" +
1348                                             captureSz + "_full_yuv.jpg";
1349                                     int[] fullYUVColors = convertPixelYuvToRgba(yuvImage.getWidth(),
1350                                             yuvImage.getHeight(), 0, 0, yuvImage);
1351                                     Bitmap fullYUVBmap = Bitmap.createBitmap(fullYUVColors,
1352                                             yuvImage.getWidth(), yuvImage.getHeight(),
1353                                             Bitmap.Config.ARGB_8888);
1354                                     dumpFile(fullSizeYuvFileName, fullYUVBmap);
1355                                 }
1356                                 fail("Camera " + mCamera.getId() + ": YUV image at capture size "
1357                                         + captureSz + " and JPEG image at capture size "
1358                                         + maxJpegSize + " for the same frame are not similar,"
1359                                         + " center patches have difference metric of "
1360                                         + difference + ", tolerance is " + tolerance);
1361                             }
1362 
1363                             // Stop capture, delete the streams.
1364                             stopCapture(/*fast*/false);
1365                             yuvImage.close();
1366                             jpegImage.close();
1367                             yuvListener.drain();
1368                             jpegListener.drain();
1369                         } finally {
1370                             closeImageReader(jpegReader);
1371                             jpegReader = null;
1372                             closeImageReader(yuvReader);
1373                             yuvReader = null;
1374                         }
1375                     }
1376                 }
1377 
1378             } finally {
1379                 closeDevice(id);
1380             }
1381         }
1382     }
1383 
1384     /**
1385      * Test that images captured after discarding free buffers are valid.
1386      */
1387     @Test
1388     public void testDiscardFreeBuffers() throws Exception {
1389         for (String id : getCameraIdsUnderTest()) {
1390             try {
1391                 Log.v(TAG, "Testing discardFreeBuffers for Camera " + id);
1392                 openDevice(id);
1393                 discardFreeBuffersTestByCamera();
1394             } finally {
1395                 closeDevice(id);
1396             }
1397         }
1398     }
1399 
1400     /** Tests that usage bits are preserved */
1401     @Test
1402     public void testUsageRespected() throws Exception {
1403         final long REQUESTED_USAGE_BITS =
1404                 HardwareBuffer.USAGE_GPU_COLOR_OUTPUT | HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE;
1405         ImageReader reader = ImageReader.newInstance(1, 1, PixelFormat.RGBA_8888, 1,
1406                 REQUESTED_USAGE_BITS);
1407         Surface surface = reader.getSurface();
1408         Canvas canvas = surface.lockHardwareCanvas();
1409         canvas.drawColor(Color.RED);
1410         surface.unlockCanvasAndPost(canvas);
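             // The posted buffer reaches the ImageReader asynchronously, so poll for up to
             // roughly a second for the image to become available.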
1411         Image image = null;
1412         for (int i = 0; i < 100; i++) {
1413             image = reader.acquireNextImage();
1414             if (image != null) break;
1415             Thread.sleep(10);
1416         }
1417         assertNotNull(image);
1418         HardwareBuffer buffer = image.getHardwareBuffer();
1419         assertNotNull(buffer);
1420         // Mask off the upper vendor bits
1421         int myBits = (int) (buffer.getUsage() & 0xFFFFFFF);
1422         assertWithMessage("Usage bits %s did not contain requested usage bits %s", myBits,
1423                 REQUESTED_USAGE_BITS).that(myBits & REQUESTED_USAGE_BITS)
1424                         .isEqualTo(REQUESTED_USAGE_BITS);
1425     }
1426 
1427     private void testLandscapeToPortraitOverride(boolean overrideToPortrait) throws Exception {
1428         if (!SystemProperties.getBoolean(CameraManager.LANDSCAPE_TO_PORTRAIT_PROP, false)) {
1429             Log.i(TAG, "Landscape to portrait override not supported, skipping test");
1430             return;
1431         }
1432 
1433         for (String id : getCameraIdsUnderTest()) {
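                 // Query the characteristics with the override disabled so the real sensor
                 // orientation and ROTATE_AND_CROP modes are visible.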
1434             CameraCharacteristics c = mCameraManager.getCameraCharacteristics(
1435                     id, /*overrideToPortrait*/false);
1436             int[] modes = c.get(CameraCharacteristics.SCALER_AVAILABLE_ROTATE_AND_CROP_MODES);
1437             boolean supportsRotateAndCrop = false;
1438             for (int mode : modes) {
1439                 if (mode == CameraMetadata.SCALER_ROTATE_AND_CROP_90
1440                         || mode == CameraMetadata.SCALER_ROTATE_AND_CROP_270) {
1441                     supportsRotateAndCrop = true;
1442                     break;
1443                 }
1444             }
1445 
1446             if (!supportsRotateAndCrop) {
1447                 Log.i(TAG, "Skipping non-rotate-and-crop cameraId " + id);
1448                 continue;
1449             }
1450 
1451             int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
1452             if (sensorOrientation != 0 && sensorOrientation != 180) {
1453                 Log.i(TAG, "Skipping portrait orientation sensor cameraId " + id);
1454                 continue;
1455             }
1456 
1457             Log.i(TAG, "Testing overrideToPortrait " + overrideToPortrait
1458                     + " for Camera " + id);
1459 
1460             if (overrideToPortrait) {
1461                 c = mCameraManager.getCameraCharacteristics(id, overrideToPortrait);
1462                 sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
1463                 assertTrue("SENSOR_ORIENTATION should imply portrait sensor.",
1464                         sensorOrientation == 90 || sensorOrientation == 270);
1465             }
1466 
1467             BufferFormatTestParam params = new BufferFormatTestParam(
1468                     ImageFormat.JPEG, /*repeating*/false);
1469             params.mValidateImageData = true;
1470 
1471             try {
1472                 openDevice(id, overrideToPortrait);
1473                 bufferFormatTestByCamera(params);
1474             } finally {
1475                 closeDevice(id);
1476             }
1477         }
1478     }
1479 
1480     @Test
1481     public void testLandscapeToPortraitOverrideEnabled() throws Exception {
1482         testLandscapeToPortraitOverride(true);
1483     }
1484 
1485     @Test
1486     public void testLandscapeToPortraitOverrideDisabled() throws Exception {
1487         testLandscapeToPortraitOverride(false);
1488     }
1489 
1490     /**
1491      * Convert a rectangular patch in a YUV image to an ARGB color array.
1492      *
1493      * @param w width of the patch.
1494      * @param h height of the patch.
1495      * @param wOffset offset of the left side of the patch.
1496      * @param hOffset offset of the top of the patch.
1497      * @param yuvImage a YUV image to select a patch from.
1498      * @return the image patch converted to RGB as an ARGB color array.
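          *
          * <p>Typical use in this test: convert a small centered patch, e.g.
          * {@code convertPixelYuvToRgba(40, 40, left, top, yuvImage)}, and wrap the result with
          * {@code Bitmap.createBitmap(colors, 40, 40, Bitmap.Config.ARGB_8888)} for comparison.</p>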
1499      */
1500     private static int[] convertPixelYuvToRgba(int w, int h, int wOffset, int hOffset,
1501                                                Image yuvImage) {
1502         final int CHANNELS = 3; // yuv
1503         final float COLOR_RANGE = 255f;
1504 
1505         assertTrue("Invalid argument to convertPixelYuvToRgba",
1506                 w > 0 && h > 0 && wOffset >= 0 && hOffset >= 0);
1507         assertNotNull(yuvImage);
1508 
1509         int imageFormat = yuvImage.getFormat();
1510         assertTrue("YUV image must have YUV-type format",
1511                 imageFormat == ImageFormat.YUV_420_888 || imageFormat == ImageFormat.YV12 ||
1512                         imageFormat == ImageFormat.NV21);
1513 
1514         int height = yuvImage.getHeight();
1515         int width = yuvImage.getWidth();
1516 
1517         Rect imageBounds = new Rect(/*left*/0, /*top*/0, /*right*/width, /*bottom*/height);
1518         Rect crop = new Rect(/*left*/wOffset, /*top*/hOffset, /*right*/wOffset + w,
1519                 /*bottom*/hOffset + h);
1520         assertTrue("Output rectangle " + crop + " must lie within image bounds " + imageBounds,
1521                 imageBounds.contains(crop));
1522         Image.Plane[] planes = yuvImage.getPlanes();
1523 
1524         Image.Plane yPlane = planes[0];
1525         Image.Plane cbPlane = planes[1];
1526         Image.Plane crPlane = planes[2];
1527 
1528         ByteBuffer yBuf = yPlane.getBuffer();
1529         int yPixStride = yPlane.getPixelStride();
1530         int yRowStride = yPlane.getRowStride();
1531         ByteBuffer cbBuf = cbPlane.getBuffer();
1532         int cbPixStride = cbPlane.getPixelStride();
1533         int cbRowStride = cbPlane.getRowStride();
1534         ByteBuffer crBuf = crPlane.getBuffer();
1535         int crPixStride = crPlane.getPixelStride();
1536         int crRowStride = crPlane.getRowStride();
1537 
1538         int[] output = new int[w * h];
1539 
1540         // TODO: Optimize this with renderscript intrinsics
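             // Row scratch buffers: yRow holds w luma samples spaced yPixStride apart, while the
             // chroma rows hold w / 2 samples each since all accepted formats are 4:2:0
             // subsampled.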
1541         byte[] yRow = new byte[yPixStride * (w - 1) + 1];
1542         byte[] cbRow = new byte[cbPixStride * (w / 2 - 1) + 1];
1543         byte[] crRow = new byte[crPixStride * (w / 2 - 1) + 1];
1544         yBuf.mark();
1545         cbBuf.mark();
1546         crBuf.mark();
1547         int initialYPos = yBuf.position();
1548         int initialCbPos = cbBuf.position();
1549         int initialCrPos = crBuf.position();
1550         int outputPos = 0;
1551         for (int i = hOffset; i < hOffset + h; i++) {
1552             yBuf.position(initialYPos + i * yRowStride + wOffset * yPixStride);
1553             yBuf.get(yRow);
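                 // Chroma is subsampled by 2 vertically (4:2:0), so new cb/cr rows are fetched
                 // only on every other luma row.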
1554             if ((i & 1) == (hOffset & 1)) {
1555                 cbBuf.position(initialCbPos + (i / 2) * cbRowStride + wOffset * cbPixStride / 2);
1556                 cbBuf.get(cbRow);
1557                 crBuf.position(initialCrPos + (i / 2) * crRowStride + wOffset * crPixStride / 2);
1558                 crBuf.get(crRow);
1559             }
1560             for (int j = 0, yPix = 0, crPix = 0, cbPix = 0; j < w; j++, yPix += yPixStride) {
1561                 float y = yRow[yPix] & 0xFF;
1562                 float cb = cbRow[cbPix] & 0xFF;
1563                 float cr = crRow[crPix] & 0xFF;
1564 
1565                 // convert YUV -> RGB (from JFIF's "Conversion to and from RGB" section)
1566                 int r = (int) Math.max(0.0f, Math.min(COLOR_RANGE, y + 1.402f * (cr - 128)));
1567                 int g = (int) Math.max(0.0f,
1568                         Math.min(COLOR_RANGE, y - 0.34414f * (cb - 128) - 0.71414f * (cr - 128)));
1569                 int b = (int) Math.max(0.0f, Math.min(COLOR_RANGE, y + 1.772f * (cb - 128)));
1570 
1571                 // Convert to ARGB pixel color (use opaque alpha)
1572                 output[outputPos++] = Color.rgb(r, g, b);
1573 
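                     // Chroma is also subsampled by 2 horizontally, so the cb/cr indices advance
                     // only after every other output pixel.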
1574                 if ((j & 1) == 1) {
1575                     crPix += crPixStride;
1576                     cbPix += cbPixStride;
1577                 }
1578             }
1579         }
1580         yBuf.rewind();
1581         cbBuf.rewind();
1582         crBuf.rewind();
1583 
1584         return output;
1585     }
1586 
1587     /**
1588      * Test capturing a stream of the given format simultaneously with a YUV stream.
1589      *
1590      * <p>Uses a fixed YUV size and varying target-format sizes; single captures are tested.</p>
1591      *
1592      * @param format The capture format to be tested along with the YUV format.
1593      */
1594     private void bufferFormatWithYuvTestByCamera(int format) throws Exception {
1595         bufferFormatWithYuvTestByCamera(format, false);
1596     }
1597 
1598     /**
1599      * Test capturing a stream of the given format simultaneously with a YUV stream.
1600      *
1601      * <p>Uses a fixed YUV size and varying target-format sizes; single captures are tested.</p>
1602      *
1603      * @param format The capture format to be tested along with the YUV format.
1604      * @param setUsageFlag Whether to use the ImageReader factory method that takes an explicit
1605      *                     usage flag
1606      */
1607     private void bufferFormatWithYuvTestByCamera(int format, boolean setUsageFlag)
1608             throws Exception {
1609         bufferFormatWithYuvTestByCamera(format, setUsageFlag, /*useYuvSize*/false);
1610     }
1611 
1612     /**
1613      * Test capturing a stream of the given format simultaneously with a YUV stream.
1614      *
1615      * <p>Compared to bufferFormatWithYuvTestByCamera(int, boolean), this method provides an
1616      * option to use the same size for both streams.</p>
1617      *
1618      * @param format The capture format to be tested along with the YUV format.
1619      * @param setUsageFlag Whether to use the ImageReader factory method that takes an explicit
1620      *                     usage flag
1621      * @param useYuvSize Whether the capture size should be the same as the YUV size
1622      */
1623     private void bufferFormatWithYuvTestByCamera(int format, boolean setUsageFlag,
1624             boolean useYuvSize) throws Exception {
1625         if (format != ImageFormat.JPEG && format != ImageFormat.RAW_SENSOR
1626                 && format != ImageFormat.PRIVATE
1627                 && format != ImageFormat.YUV_420_888) {
1628             throw new IllegalArgumentException("Unsupported format: " + format);
1629         }
1630 
1631         final int NUM_SINGLE_CAPTURE_TESTED = MAX_NUM_IMAGES - 1;
1632         Size maxYuvSz = mOrderedPreviewSizes.get(0);
1633         Size[] targetCaptureSizes = useYuvSize ? new Size[]{maxYuvSz} :
1634                 mStaticInfo.getAvailableSizesForFormatChecked(format,
1635                 StaticMetadata.StreamDirection.Output);
1636 
1637         for (Size captureSz : targetCaptureSizes) {
1638             if (VERBOSE) {
1639                 Log.v(TAG, "Testing yuv size " + maxYuvSz.toString() + " and capture size "
1640                         + captureSz.toString() + " for camera " + mCamera.getId());
1641             }
1642 
1643             ImageReader captureReader = null;
1644             ImageReader yuvReader = null;
1645             try {
1646                 // Create YUV image reader
1647                 SimpleImageReaderListener yuvListener  = new SimpleImageReaderListener();
1648                 if (setUsageFlag) {
1649                     yuvReader = createImageReader(maxYuvSz, ImageFormat.YUV_420_888, MAX_NUM_IMAGES,
1650                             HardwareBuffer.USAGE_CPU_READ_OFTEN, yuvListener);
1651                 } else {
1652                     yuvReader = createImageReader(maxYuvSz, ImageFormat.YUV_420_888, MAX_NUM_IMAGES,
1653                             yuvListener);
1654                 }
1655 
1656                 Surface yuvSurface = yuvReader.getSurface();
1657 
1658                 // Create capture image reader
1659                 SimpleImageReaderListener captureListener = new SimpleImageReaderListener();
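                     // PRIVATE images are opaque to the CPU (their planes cannot be accessed), so
                     // request a composer-overlay usage for them instead of CPU_READ_OFTEN.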
1660                 boolean isPrivateFormat = (format == ImageFormat.PRIVATE);
1661                 long usage = isPrivateFormat ? HardwareBuffer.USAGE_COMPOSER_OVERLAY :
1662                         HardwareBuffer.USAGE_CPU_READ_OFTEN;
1663                 if (setUsageFlag || isPrivateFormat) {
1664                     captureReader = createImageReader(captureSz, format, MAX_NUM_IMAGES,
1665                             usage, captureListener);
1666                 } else {
1667                     captureReader = createImageReader(captureSz, format, MAX_NUM_IMAGES,
1668                             captureListener);
1669                 }
1670                 Surface captureSurface = captureReader.getSurface();
1671 
1672                 // Capture images.
1673                 List<Surface> outputSurfaces = new ArrayList<Surface>();
1674                 outputSurfaces.add(yuvSurface);
1675                 outputSurfaces.add(captureSurface);
1676                 CaptureRequest.Builder request = prepareCaptureRequestForSurfaces(outputSurfaces,
1677                         CameraDevice.TEMPLATE_PREVIEW);
1678                 SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1679 
1680                 for (int i = 0; i < NUM_SINGLE_CAPTURE_TESTED; i++) {
1681                     startCapture(request.build(), /*repeating*/false, resultListener, mHandler);
1682                 }
1683 
1684                 // Verify capture result and images
1685                 for (int i = 0; i < NUM_SINGLE_CAPTURE_TESTED; i++) {
1686                     resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);
1687                     if (VERBOSE) {
1688                         Log.v(TAG, " Got the capture result back for " + i + "th capture");
1689                     }
1690 
1691                     Image yuvImage = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
1692                     if (VERBOSE) {
1693                         Log.v(TAG, " Got the yuv image back for " + i + "th capture");
1694                     }
1695 
1696                     Image captureImage = captureListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
1697                     if (VERBOSE) {
1698                         Log.v(TAG, " Got the capture image back for " + i + "th capture");
1699                     }
1700 
1701                     // Validate captured images.
1702                     CameraTestUtils.validateImage(yuvImage, maxYuvSz.getWidth(),
1703                             maxYuvSz.getHeight(), ImageFormat.YUV_420_888, /*filePath*/null);
1704                     CameraTestUtils.validateImage(captureImage, captureSz.getWidth(),
1705                             captureSz.getHeight(), format, /*filePath*/null);
1706                     yuvImage.close();
1707                     captureImage.close();
1708                 }
1709 
1710                 // Stop capture, delete the streams.
1711                 stopCapture(/*fast*/false);
1712             } finally {
1713                 closeImageReader(captureReader);
1714                 captureReader = null;
1715                 closeImageReader(yuvReader);
1716                 yuvReader = null;
1717             }
1718         }
1719     }
1720 
1721     private void invalidAccessTestAfterClose() throws Exception {
1722         final int FORMAT = mStaticInfo.isColorOutputSupported() ?
1723             ImageFormat.YUV_420_888 : ImageFormat.DEPTH16;
1724 
1725         Size[] availableSizes = mStaticInfo.getAvailableSizesForFormatChecked(FORMAT,
1726                 StaticMetadata.StreamDirection.Output);
1727         Image img = null;
1728         // Create ImageReader.
1729         mListener = new SimpleImageListener();
1730         createDefaultImageReader(availableSizes[0], FORMAT, MAX_NUM_IMAGES, mListener);
1731 
1732         // Start capture.
1733         CaptureRequest request = prepareCaptureRequest();
1734         SimpleCaptureCallback listener = new SimpleCaptureCallback();
1735         startCapture(request, /* repeating */false, listener, mHandler);
1736 
1737         mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
1738         img = mReader.acquireNextImage();
1739         Plane firstPlane = img.getPlanes()[0];
1740         ByteBuffer buffer = firstPlane.getBuffer();
1741         img.close();
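             // The Image, its first Plane, and the Plane's ByteBuffer were obtained before
             // close(); the helper below verifies that accessing them now fails as expected.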
1742 
1743         imageInvalidAccessTestAfterClose(img, firstPlane, buffer);
1744     }
1745 
1746     /**
1747      * Test that images captured after discarding free buffers are valid.
1748      */
1749     private void discardFreeBuffersTestByCamera() throws Exception {
1750         final int FORMAT = mStaticInfo.isColorOutputSupported() ?
1751             ImageFormat.YUV_420_888 : ImageFormat.DEPTH16;
1752 
1753         final Size SIZE = mStaticInfo.getAvailableSizesForFormatChecked(FORMAT,
1754                 StaticMetadata.StreamDirection.Output)[0];
1755         // Create ImageReader.
1756         mListener = new SimpleImageListener();
1757         createDefaultImageReader(SIZE, FORMAT, MAX_NUM_IMAGES, mListener);
1758 
1759         // Start capture.
1760         final boolean REPEATING = true;
1761         final boolean SINGLE = false;
1762         CaptureRequest request = prepareCaptureRequest();
1763         SimpleCaptureCallback listener = new SimpleCaptureCallback();
1764         startCapture(request, REPEATING, listener, mHandler);
1765 
1766         // Validate images and capture results.
1767         validateImage(SIZE, FORMAT, NUM_FRAME_VERIFIED, REPEATING, /*colorSpace*/ null);
1768         validateCaptureResult(FORMAT, SIZE, listener, NUM_FRAME_VERIFIED);
1769 
1770         // Discard free buffers.
1771         mReader.discardFreeBuffers();
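             // discardFreeBuffers() only releases buffers sitting in the reader's free queue;
             // the repeating request keeps running, so buffers are reallocated on demand and the
             // validation below must still pass.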
1772 
1773         // Validate images and capture results again.
1774         validateImage(SIZE, FORMAT, NUM_FRAME_VERIFIED, REPEATING, /*colorSpace*/ null);
1775         validateCaptureResult(FORMAT, SIZE, listener, NUM_FRAME_VERIFIED);
1776 
1777         // Stop repeating request in preparation for discardFreeBuffers
1778         mCameraSession.stopRepeating();
1779         mCameraSessionListener.getStateWaiter().waitForState(
1780                 BlockingSessionCallback.SESSION_READY, SESSION_READY_TIMEOUT_MS);
1781 
1782         // Drain the reader queue and discard free buffers from the reader.
1783         Image img = mReader.acquireLatestImage();
1784         if (img != null) {
1785             img.close();
1786         }
1787         mReader.discardFreeBuffers();
1788 
1789         // Do a single capture for camera device to reallocate buffers
1790         mListener.reset();
1791         startCapture(request, SINGLE, listener, mHandler);
1792         validateImage(SIZE, FORMAT, /*captureCount*/ 1, SINGLE, /*colorSpace*/ null);
1793     }
1794 
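         /**
          * Parameter holder describing how a single bufferFormatTestByCamera run is configured
          * (format, usage flag, dynamic range profile, color space, data space, timestamp base).
          */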
1795     private class BufferFormatTestParam {
1796         public int mFormat;
1797         public boolean mRepeating;
1798         public boolean mSetUsageFlag = false;
1799         public long mUsageFlag = HardwareBuffer.USAGE_CPU_READ_OFTEN;
1800         public boolean mCheckSession = false;
1801         public boolean mValidateImageData = true;
1802         public String mPhysicalId = null;
1803         public long mDynamicRangeProfile = DynamicRangeProfiles.STANDARD;
1804         public ColorSpace.Named mColorSpace;
1805         public boolean mUseColorSpace = false;
1806         public int mTimestampBase = OutputConfiguration.TIMESTAMP_BASE_DEFAULT;
1807         public boolean mUseDataSpace = false;
1808         public int mDataSpace = DataSpace.DATASPACE_UNKNOWN;
1809         public int mHardwareBufferFormat = HardwareBuffer.BLOB;
1810 
1811         BufferFormatTestParam(int format, boolean repeating) {
1812             mFormat = format;
1813             mRepeating = repeating;
1814         }
1815     }
1816 
1817     private void bufferFormatTestByCamera(BufferFormatTestParam params)
1818             throws Exception {
1819         int format = params.mFormat;
1820         boolean setUsageFlag = params.mSetUsageFlag;
1821         long usageFlag = params.mUsageFlag;
1822         boolean repeating = params.mRepeating;
1823         boolean validateImageData = params.mValidateImageData;
1824         int timestampBase = params.mTimestampBase;
1825 
1826         String physicalId = params.mPhysicalId;
1827         StaticMetadata staticInfo;
1828         if (physicalId == null) {
1829             staticInfo = mStaticInfo;
1830         } else {
1831             staticInfo = mAllStaticInfo.get(physicalId);
1832         }
1833 
1834         Size[] availableSizes = staticInfo.getAvailableSizesForFormatChecked(format,
1835                 StaticMetadata.StreamDirection.Output);
1836 
1837         boolean secureTest = setUsageFlag &&
1838                 ((usageFlag & HardwareBuffer.USAGE_PROTECTED_CONTENT) != 0);
1839         Size secureDataSize = null;
1840         if (secureTest) {
1841             secureDataSize = staticInfo.getCharacteristics().get(
1842                     CameraCharacteristics.SCALER_DEFAULT_SECURE_IMAGE_SIZE);
1843         }
1844 
1845         boolean validateTimestampBase = (timestampBase
1846                 != OutputConfiguration.TIMESTAMP_BASE_DEFAULT);
1847         Integer deviceTimestampSource = staticInfo.getCharacteristics().get(
1848                 CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
1849         // for each resolution, test imageReader:
1850         for (Size sz : availableSizes) {
1851             try {
1852                 // For secure mode test only test default secure data size if HAL advertises one.
1853                 if (secureDataSize != null && !secureDataSize.equals(sz)) {
1854                     continue;
1855                 }
1856 
1857                 if (VERBOSE) {
1858                     Log.v(TAG, "Testing size " + sz.toString() + " format " + format
1859                             + " for camera " + mCamera.getId());
1860                 }
1861 
1862                 // Create ImageReader.
1863                 mListener  = new SimpleImageListener();
1864                 if (params.mUseDataSpace) {
1865                     createDefaultImageReader(sz, params.mHardwareBufferFormat, MAX_NUM_IMAGES,
1866                             usageFlag, params.mDataSpace, mListener);
1867                 } else if (setUsageFlag) {
1868                     createDefaultImageReader(sz, format, MAX_NUM_IMAGES, usageFlag, mListener);
1869                 } else {
1870                     createDefaultImageReader(sz, format, MAX_NUM_IMAGES, mListener);
1871                 }
1872 
1873                 // Don't queue up images if we won't validate them
1874                 if (!validateImageData && !validateTimestampBase) {
1875                     ImageDropperListener imageDropperListener = new ImageDropperListener();
1876                     mReader.setOnImageAvailableListener(imageDropperListener, mHandler);
1877                 }
1878 
1879                 if (params.mCheckSession) {
1880                     checkImageReaderSessionConfiguration(
1881                             "Camera capture session validation for format: " + format + " failed",
1882                             physicalId);
1883                 }
1884 
1885                 ArrayList<OutputConfiguration> outputConfigs = new ArrayList<>();
1886                 OutputConfiguration config = new OutputConfiguration(mReader.getSurface());
1887                 assertTrue("Default timestamp source must be DEFAULT",
1888                         config.getTimestampBase() == OutputConfiguration.TIMESTAMP_BASE_DEFAULT);
1889                 assertTrue("Default mirroring mode must be AUTO",
1890                         config.getMirrorMode() == OutputConfiguration.MIRROR_MODE_AUTO);
1891                 if (physicalId != null) {
1892                     config.setPhysicalCameraId(physicalId);
1893                 }
1894                 config.setDynamicRangeProfile(params.mDynamicRangeProfile);
1895                 config.setTimestampBase(params.mTimestampBase);
1896                 outputConfigs.add(config);
1897 
1898                 CaptureRequest request;
1899                 if (params.mUseColorSpace) {
1900                     request = prepareCaptureRequestForColorSpace(
1901                         outputConfigs, CameraDevice.TEMPLATE_PREVIEW, params.mColorSpace)
1902                         .build();
1903                 } else {
1904                     request = prepareCaptureRequestForConfigs(
1905                         outputConfigs, CameraDevice.TEMPLATE_PREVIEW).build();
1906                 }
1907 
1908                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
1909                 startCapture(request, repeating, listener, mHandler);
1910 
1911                 int numFrameVerified = repeating ? NUM_FRAME_VERIFIED : 1;
1912 
1913                 if (validateTimestampBase) {
1914                     validateTimestamps(deviceTimestampSource, timestampBase, numFrameVerified,
1915                             listener, repeating);
1916                 }
1917 
1918                 if (validateImageData) {
1919                     // Validate images.
1920                     ColorSpace colorSpace = null;
1921                     if (params.mUseColorSpace) {
1922                         colorSpace = ColorSpace.get(params.mColorSpace);
1923                     }
1924                     validateImage(sz, format, numFrameVerified, repeating, colorSpace);
1925                 }
1926 
1927                 // Validate capture result.
1928                 validateCaptureResult(format, sz, listener, numFrameVerified);
1929 
1930                 // stop capture.
1931                 stopCapture(/*fast*/false);
1932             } finally {
1933                 closeDefaultImageReader();
1934             }
1935 
1936             // Only test one size for non-default timestamp base.
1937             if (timestampBase != OutputConfiguration.TIMESTAMP_BASE_DEFAULT) break;
1938         }
1939     }
1940 
1941     private void bufferFormatLongProcessingTimeTestByCamera(int format)
1942             throws Exception {
1943 
1944         final int TEST_SENSITIVITY_VALUE = mStaticInfo.getSensitivityClampToRange(204);
1945         final long TEST_EXPOSURE_TIME_NS = mStaticInfo.getExposureClampToRange(28000000);
1946         final long EXPOSURE_TIME_ERROR_MARGIN_NS = 100000;
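             // Fully manual exposure and sensitivity values, clamped to the device's supported
             // ranges, so the capture results can be checked against the requested settings.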
1947 
1948         Size[] availableSizes = mStaticInfo.getAvailableSizesForFormatChecked(format,
1949                 StaticMetadata.StreamDirection.Output);
1950 
1951         Size[] testSizes = getMinAndMaxSizes(availableSizes);
1952 
1953         // for each resolution, test imageReader:
1954         for (Size sz : testSizes) {
1955             Log.v(TAG, "testing size " + sz.toString());
1956             try {
1957                 if (VERBOSE) {
1958                     Log.v(TAG, "Testing long processing time: size " + sz.toString() + " format " +
1959                             format + " for camera " + mCamera.getId());
1960                 }
1961 
1962                 // Create ImageReader.
1963                 mListener  = new SimpleImageListener();
1964                 createDefaultImageReader(sz, format, MAX_NUM_IMAGES, mListener);
1965 
1966                 // Setting manual controls
1967                 List<Surface> outputSurfaces = new ArrayList<Surface>();
1968                 outputSurfaces.add(mReader.getSurface());
1969                 CaptureRequest.Builder requestBuilder = prepareCaptureRequestForSurfaces(
1970                         outputSurfaces, CameraDevice.TEMPLATE_STILL_CAPTURE);
1971                 // Need to consume the SESSION_READY state because stopCapture() waits
1972                 // on an additional SESSION_READY state.
1973                 mCameraSessionListener.getStateWaiter().
1974                     waitForState(BlockingSessionCallback.SESSION_READY, CAMERA_IDLE_TIMEOUT_MS);
1975 
1976                 requestBuilder.set(
1977                         CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_OFF);
1978                 requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
1979                 requestBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
1980                 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
1981                         CaptureRequest.CONTROL_AE_MODE_OFF);
1982                 requestBuilder.set(CaptureRequest.CONTROL_AWB_MODE,
1983                         CaptureRequest.CONTROL_AWB_MODE_OFF);
1984                 requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, TEST_SENSITIVITY_VALUE);
1985                 requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, TEST_EXPOSURE_TIME_NS);
1986 
1987                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
1988                 startCapture(requestBuilder.build(), /*repeating*/true, listener, mHandler);
1989 
1990                 for (int i = 0; i < NUM_LONG_PROCESS_TIME_FRAME_VERIFIED; i++) {
1991                     mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
1992 
1993                     // Verify image.
1994                     Image img = mReader.acquireNextImage();
1995                     assertNotNull("Unable to acquire next image", img);
1996                     CameraTestUtils.validateImage(img, sz.getWidth(), sz.getHeight(), format,
1997                             mDebugFileNameBase);
1998 
1999                     // Verify the exposure time and iso match the requested values.
2000                     CaptureResult result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
2001 
2002                     long exposureTimeDiff = TEST_EXPOSURE_TIME_NS -
2003                             getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
2004                     int sensitivityDiff = TEST_SENSITIVITY_VALUE -
2005                             getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);
2006 
2007                     mCollector.expectTrue(
2008                             String.format("Long processing frame %d format %d size %s " +
2009                                     "exposure time was %d expecting %d.", i, format, sz.toString(),
2010                                     getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME),
2011                                     TEST_EXPOSURE_TIME_NS),
2012                             exposureTimeDiff < EXPOSURE_TIME_ERROR_MARGIN_NS &&
2013                             exposureTimeDiff >= 0);
2014 
2015                     mCollector.expectTrue(
2016                             String.format("Long processing frame %d format %d size %s " +
2017                                     "sensitivity was %d expecting %d.", i, format, sz.toString(),
2018                                     getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY),
2019                                     TEST_SENSITIVITY_VALUE),
2020                             sensitivityDiff >= 0);
2021 
2022                     // Sleep to simulate long processing before closing the image.
2023                     Thread.sleep(LONG_PROCESS_TIME_MS);
2024                     img.close();
2025                 }
2026                 // Stop capture.
2027                 // Drain the reader queue in case the full queue blocks
2028                 // HAL from delivering new results
2029                 ImageDropperListener imageDropperListener = new ImageDropperListener();
2030                 mReader.setOnImageAvailableListener(imageDropperListener, mHandler);
2031                 Image img = mReader.acquireLatestImage();
2032                 if (img != null) {
2033                     img.close();
2034                 }
2035                 stopCapture(/*fast*/true);
2036             } finally {
2037                 closeDefaultImageReader();
2038             }
2039         }
2040     }
2041 
2042     /**
2043      * Validate capture results.
2044      *
2045      * @param format The format of this capture.
2046      * @param size The capture size.
2047      * @param listener The capture listener to get capture result callbacks.
2048      */
2049     private void validateCaptureResult(int format, Size size, SimpleCaptureCallback listener,
2050             int numFrameVerified) {
2051         for (int i = 0; i < numFrameVerified; i++) {
2052             CaptureResult result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
2053 
2054             // TODO: Update this to use availableResultKeys once shim supports this.
2055             if (mStaticInfo.isCapabilitySupported(
2056                     CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS)) {
2057                 StaticMetadata staticInfo = mStaticInfo;
2058                 boolean supportActivePhysicalIdConsistency =
2059                         PropertyUtil.getFirstApiLevel() >= Build.VERSION_CODES.S;
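                     // For logical multi-cameras the reported exposure/sensitivity come from the
                     // active physical camera, so validate against that camera's ranges.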
2060                 if (mStaticInfo.isLogicalMultiCamera() && supportActivePhysicalIdConsistency
2061                         && mStaticInfo.isActivePhysicalCameraIdSupported()) {
2062                     String activePhysicalId =
2063                             result.get(CaptureResult.LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID);
2064                     staticInfo = mAllStaticInfo.get(activePhysicalId);
2065                 }
2066 
2067                 Long exposureTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
2068                 Integer sensitivity = getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);
2069                 mCollector.expectInRange(
2070                         String.format(
2071                                 "Capture for format %d, size %s exposure time is invalid.",
2072                                 format, size.toString()),
2073                         exposureTime,
2074                         staticInfo.getExposureMinimumOrDefault(),
2075                         staticInfo.getExposureMaximumOrDefault()
2076                 );
2077                 mCollector.expectInRange(
2078                         String.format("Capture for format %d, size %s sensitivity is invalid.",
2079                                 format, size.toString()),
2080                         sensitivity,
2081                         staticInfo.getSensitivityMinimumOrDefault(),
2082                         staticInfo.getSensitivityMaximumOrDefault()
2083                 );
2084             }
2085             // TODO: add more key validations.
2086         }
2087     }
2088 
2089     private final class SimpleImageListener implements ImageReader.OnImageAvailableListener {
2090         private final ConditionVariable imageAvailable = new ConditionVariable();
2091         @Override
2092         public void onImageAvailable(ImageReader reader) {
2093             if (mReader != reader) {
2094                 return;
2095             }
2096 
2097             if (VERBOSE) Log.v(TAG, "new image available");
2098             imageAvailable.open();
2099         }
2100 
2101         public void waitForAnyImageAvailable(long timeout) {
2102             if (imageAvailable.block(timeout)) {
2103                 imageAvailable.close();
2104             } else {
2105                 fail("wait for image available timed out after " + timeout + "ms");
2106             }
2107         }
2108 
2109         public void closePendingImages() {
2110             Image image = mReader.acquireLatestImage();
2111             if (image != null) {
2112                 image.close();
2113             }
2114         }
2115 
2116         public void reset() {
2117             imageAvailable.close();
2118         }
2119     }
2120 
2121     private void validateImage(Size sz, int format, int captureCount, boolean repeating,
2122             ColorSpace colorSpace) throws Exception {
2123         // TODO: Add more format here, and wrap each one as a function.
2124         Image img;
2125         final int MAX_RETRY_COUNT = 20;
2126         int numImageVerified = 0;
2127         int reTryCount = 0;
2128         while (numImageVerified < captureCount) {
2129             assertNotNull("Image listener is null", mListener);
2130             if (VERBOSE) Log.v(TAG, "Waiting for an Image");
2131             mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
2132             if (repeating) {
2133                 /**
2134                  * Acquire the latest image in case the validation is slower than
2135                  * the image producing rate.
2136                  */
2137                 img = mReader.acquireLatestImage();
2138                 /**
2139                  * Sometimes if multiple onImageAvailable callbacks being queued,
2140                  * acquireLatestImage will clear all buffer before corresponding callback is
2141                  * executed. Wait for a new frame in that case.
2142                  */
2143                 if (img == null && reTryCount < MAX_RETRY_COUNT) {
2144                     reTryCount++;
2145                     continue;
2146                 }
2147             } else {
2148                 img = mReader.acquireNextImage();
2149             }
2150             assertNotNull("Unable to acquire the latest image", img);
2151             if (VERBOSE) Log.v(TAG, "Got the latest image");
2152             CameraTestUtils.validateImage(img, sz.getWidth(), sz.getHeight(), format,
2153                     mDebugFileNameBase, colorSpace);
2154             HardwareBuffer hwb = img.getHardwareBuffer();
2155             assertNotNull("Unable to retrieve the Image's HardwareBuffer", hwb);
2156             if (format == ImageFormat.DEPTH_JPEG) {
2157                 byte [] dynamicDepthBuffer = CameraTestUtils.getDataFromImage(img);
2158                 assertTrue("Dynamic depth validation failed!",
2159                         validateDynamicDepthNative(dynamicDepthBuffer));
2160             }
2161             if (VERBOSE) Log.v(TAG, "finish validation of image " + numImageVerified);
2162             img.close();
2163             numImageVerified++;
2164             reTryCount = 0;
2165         }
2166 
2167         // Return all pending images to the ImageReader as the validateImage may
2168         // take a while to return and there could be many images pending.
2169         mListener.closePendingImages();
2170     }
2171 
2172     private void validateTimestamps(Integer deviceTimestampSource, int timestampBase,
2173             int captureCount, SimpleCaptureCallback listener, boolean repeating) throws Exception {
2174         Image img;
2175         final int MAX_RETRY_COUNT = 20;
2176         int numImageVerified = 0;
2177         int retryCount = 0;
2178         List<Long> imageTimestamps = new ArrayList<Long>();
2179         assertNotNull("Image listener is null", mListener);
2180         while (numImageVerified < captureCount) {
2181             if (VERBOSE) Log.v(TAG, "Waiting for an Image");
2182             mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
2183             if (repeating) {
2184                 img = mReader.acquireNextImage();
2185                 if (img == null && retryCount < MAX_RETRY_COUNT) {
2186                     retryCount++;
2187                     continue;
2188                 }
2189             } else {
2190                 img = mReader.acquireNextImage();
2191             }
2192             assertNotNull("Unable to acquire the latest image", img);
2193             if (VERBOSE) {
2194                 Log.v(TAG, "Got the latest image with timestamp " + img.getTimestamp());
2195             }
2196             imageTimestamps.add(img.getTimestamp());
2197             img.close();
2198             numImageVerified++;
2199             retryCount = 0;
2200         }
2201 
2202         List<Long> captureStartTimestamps = listener.getCaptureStartTimestamps(captureCount);
2203         if (VERBOSE) {
2204             Log.v(TAG, "deviceTimestampSource: " + deviceTimestampSource
2205                     + ", timestampBase: " + timestampBase + ", captureStartTimestamps: "
2206                     + captureStartTimestamps + ", imageTimestamps: " + imageTimestamps);
2207         }
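             // With the SENSOR base, or when the requested base already matches the device's
             // timestamp source, no conversion is applied, so every image timestamp must match
             // one of the onCaptureStarted timestamps exactly.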
2208         if (timestampBase == OutputConfiguration.TIMESTAMP_BASE_SENSOR
2209                 || (timestampBase == OutputConfiguration.TIMESTAMP_BASE_MONOTONIC
2210                 && deviceTimestampSource == CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN)
2211                 || (timestampBase == OutputConfiguration.TIMESTAMP_BASE_REALTIME
2212                 && deviceTimestampSource == CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME)) {
2213             // Makes sure image timestamps match capture started timestamp
2214             for (Long timestamp : imageTimestamps) {
2215                 mCollector.expectTrue("Image timestamp " + timestamp
2216                         + " should match one of onCaptureStarted timestamps "
2217                         + captureStartTimestamps,
2218                         captureStartTimestamps.contains(timestamp));
2219             }
2220         } else if (timestampBase == OutputConfiguration.TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED) {
2221             // Make sure that timestamp base is MONOTONIC. Do not strictly check against
2222             // choreographer callback because there are cases camera framework doesn't use
2223             // choreographer timestamp (when consumer is slower than camera for example).
2224             final int TIMESTAMP_THRESHOLD_MILLIS = 3000; // 3 seconds
2225             long monotonicTime = SystemClock.uptimeMillis();
2226             for (Long timestamp : imageTimestamps) {
2227                 long timestampMs = TimeUnit.NANOSECONDS.toMillis(timestamp);
2228                 mCollector.expectTrue("Image timestamp " + timestampMs + " ms should be in the "
2229                         + "same timebase as SystemClock.uptimeMillis " + monotonicTime
2230                         + " ms when timestamp base is set to CHOREOGRAPHER synced",
2231                         Math.abs(timestampMs - monotonicTime) < TIMESTAMP_THRESHOLD_MILLIS);
2232             }
2233         }
2234 
2235         // Return all pending images to the ImageReader as the validateImage may
2236         // take a while to return and there could be many images pending.
2237         mListener.closePendingImages();
2238     }
2239 
2240     /**
2241      * Gets the list of test sizes to run the test on, given the array of available sizes.
2242      * For ImageReaderTest the exact sizes are not particularly relevant, so testing just the
2243      * min and max sizes is sufficient and significantly reduces test time.
2244      */
2245     private Size[] getMinAndMaxSizes(Size[] availableSizes) {
2246         if (availableSizes.length <= 2) {
2247             return availableSizes;
2248         }
2249 
2250         Size[] testSizes = new Size[2];
2251         Size maxSize = availableSizes[0];
2252         Size minSize = availableSizes[1];
2253 
2254         for (Size size : availableSizes) {
2255             if (size.getWidth() * size.getHeight() > maxSize.getWidth() * maxSize.getHeight()) {
2256                 maxSize = size;
2257             }
2258 
2259             if (size.getWidth() * size.getHeight() < minSize.getWidth() * minSize.getHeight()) {
2260                 minSize = size;
2261             }
2262         }
2263 
2264         testSizes[0] = minSize;
2265         testSizes[1] = maxSize;
2266 
2267         return testSizes;
2268     }
2269 
2270     /** Load dynamic depth validation jni on initialization */
2271     static {
2272         System.loadLibrary("ctscamera2_jni");
2273     }
2274     /**
2275      * Use the dynamic depth SDK to validate a dynamic depth file stored in the buffer.
2276      *
2277      * Returns false if the dynamic depth has validation errors. Validation warnings/errors
2278      * will be printed to logcat.
2279      */
2280     public static native boolean validateDynamicDepthNative(byte[] dynamicDepthBuffer);
2281 }
2282