1 /*
2  * Copyright 2013 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.cts;
18 
19 import static android.hardware.camera2.cts.CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS;
20 import static android.hardware.camera2.cts.CameraTestUtils.SESSION_READY_TIMEOUT_MS;
21 import static android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
22 import static android.hardware.camera2.cts.CameraTestUtils.SimpleImageReaderListener;
23 import static android.hardware.camera2.cts.CameraTestUtils.dumpFile;
24 import static android.hardware.camera2.cts.CameraTestUtils.getValueNotNull;
25 
26 import static com.google.common.truth.Truth.assertWithMessage;
27 
28 import static junit.framework.Assert.assertEquals;
29 import static junit.framework.Assert.assertFalse;
30 import static junit.framework.Assert.assertNotNull;
31 import static junit.framework.Assert.assertTrue;
32 import static junit.framework.Assert.fail;
33 
34 import android.graphics.Bitmap;
35 import android.graphics.BitmapFactory;
36 import android.graphics.BitmapRegionDecoder;
37 import android.graphics.Canvas;
38 import android.graphics.Color;
39 import android.graphics.ColorSpace;
40 import android.graphics.ImageFormat;
41 import android.graphics.Matrix;
42 import android.graphics.PixelFormat;
43 import android.graphics.Rect;
44 import android.graphics.RectF;
45 import android.hardware.DataSpace;
46 import android.hardware.HardwareBuffer;
47 import android.hardware.camera2.CameraCharacteristics;
48 import android.hardware.camera2.CameraDevice;
49 import android.hardware.camera2.CameraManager;
50 import android.hardware.camera2.CameraMetadata;
51 import android.hardware.camera2.CaptureRequest;
52 import android.hardware.camera2.CaptureResult;
53 import android.hardware.camera2.cts.CameraTestUtils.ImageDropperListener;
54 import android.hardware.camera2.cts.helpers.StaticMetadata;
55 import android.hardware.camera2.cts.rs.BitmapUtils;
56 import android.hardware.camera2.cts.testcases.Camera2AndroidTestCase;
57 import android.hardware.camera2.params.DynamicRangeProfiles;
58 import android.hardware.camera2.params.OutputConfiguration;
59 import android.hardware.camera2.params.StreamConfigurationMap;
60 import android.media.Image;
61 import android.media.Image.Plane;
62 import android.media.ImageReader;
63 import android.media.ImageWriter;
64 import android.os.Build;
65 import android.os.ConditionVariable;
66 import android.os.SystemClock;
67 import android.os.SystemProperties;
68 import android.util.Log;
69 import android.util.Size;
70 import android.view.Surface;
71 
72 import com.android.compatibility.common.util.PropertyUtil;
73 import com.android.ex.camera2.blocking.BlockingSessionCallback;
74 
75 import org.junit.Test;
76 import org.junit.runner.RunWith;
77 import org.junit.runners.Parameterized;
78 
79 import java.nio.ByteBuffer;
80 import java.util.ArrayList;
81 import java.util.Arrays;
82 import java.util.List;
83 import java.util.Set;
84 import java.util.concurrent.TimeUnit;
85 
86 /**
87  * <p>Basic test for the ImageReader APIs. It uses CameraDevice as the producer; the camera
88  * sends data to the surface provided by the ImageReader. The image formats below
89  * are tested:</p>
90  *
91  * <p>YUV_420_888: flexible YUV420; a mandatory format for camera devices. </p>
92  * <p>JPEG: used for JPEG still capture; also a mandatory format. </p>
93  * <p>Some invalid access tests. </p>
94  * <p>TODO: Add more format tests? </p>
95  */
96 @RunWith(Parameterized.class)
97 public class ImageReaderTest extends Camera2AndroidTestCase {
98     private static final String TAG = "ImageReaderTest";
99     private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
100     private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
101 
102     // Number of frames (for streaming requests) to be verified.
103     private static final int NUM_FRAME_VERIFIED = 2;
104     // Number of frames (for streaming requests) to be verified with long processing time.
105     private static final int NUM_LONG_PROCESS_TIME_FRAME_VERIFIED = 10;
106     // How long to hold each image in order to simulate a long processing time.
107     private static final int LONG_PROCESS_TIME_MS = 300;
108     // Max number of images that can be accessed simultaneously from an ImageReader.
109     private static final int MAX_NUM_IMAGES = 5;
110     // Max difference allowed between YUV and JPEG patches. This tolerance is intentionally very
111     // generous to avoid false positives due to punch/saturation operations vendors apply to the
112     // JPEG outputs.
113     private static final double IMAGE_DIFFERENCE_TOLERANCE = 40;
114     // Legacy level devices need an even larger tolerance because JPEG and YUV are not
115     // captured from the same frame in legacy mode.
116     private static final double IMAGE_DIFFERENCE_TOLERANCE_LEGACY = 60;
117 
118     private SimpleImageListener mListener;
119 
120     @Override
121     public void setUp() throws Exception {
122         super.setUp();
123     }
124 
125     @Override
126     public void tearDown() throws Exception {
127         super.tearDown();
128     }
129 
130     @Test
131     public void testFlexibleYuv() throws Exception {
132         for (String id : mCameraIdsUnderTest) {
133             try {
134                 Log.i(TAG, "Testing Camera " + id);
135                 openDevice(id);
136                 BufferFormatTestParam params = new BufferFormatTestParam(
137                         ImageFormat.YUV_420_888, /*repeating*/true);
138                 bufferFormatTestByCamera(params);
139             } finally {
140                 closeDevice(id);
141             }
142         }
143     }
144 
145     @Test
146     public void testDepth16() throws Exception {
147         for (String id : mCameraIdsUnderTest) {
148             try {
149                 Log.i(TAG, "Testing Camera " + id);
150                 openDevice(id);
151                 BufferFormatTestParam params = new BufferFormatTestParam(
152                         ImageFormat.DEPTH16, /*repeating*/true);
153                 bufferFormatTestByCamera(params);
154             } finally {
155                 closeDevice(id);
156             }
157         }
158     }
159 
160     @Test
161     public void testDepthPointCloud() throws Exception {
162         for (String id : mCameraIdsUnderTest) {
163             try {
164                 Log.i(TAG, "Testing Camera " + id);
165                 openDevice(id);
166                 BufferFormatTestParam params = new BufferFormatTestParam(
167                         ImageFormat.DEPTH_POINT_CLOUD, /*repeating*/true);
168                 bufferFormatTestByCamera(params);
169             } finally {
170                 closeDevice(id);
171             }
172         }
173     }
174 
175     @Test
176     public void testDynamicDepth() throws Exception {
177         for (String id : mCameraIdsUnderTest) {
178             try {
179                 openDevice(id);
180                 BufferFormatTestParam params = new BufferFormatTestParam(
181                         ImageFormat.DEPTH_JPEG, /*repeating*/true);
182                 params.mCheckSession = true;
183                 bufferFormatTestByCamera(params);
184             } finally {
185                 closeDevice(id);
186             }
187         }
188     }
189 
190     @Test
191     public void testY8() throws Exception {
192         for (String id : mCameraIdsUnderTest) {
193             try {
194                 Log.i(TAG, "Testing Camera " + id);
195                 openDevice(id);
196                 BufferFormatTestParam params = new BufferFormatTestParam(
197                         ImageFormat.Y8, /*repeating*/true);
198                 bufferFormatTestByCamera(params);
199             } finally {
200                 closeDevice(id);
201             }
202         }
203     }
204 
205     @Test
206     public void testJpeg() throws Exception {
207         for (String id : mCameraIdsUnderTest) {
208             try {
209                 Log.v(TAG, "Testing jpeg capture for Camera " + id);
210                 openDevice(id);
211                 BufferFormatTestParam params = new BufferFormatTestParam(
212                         ImageFormat.JPEG, /*repeating*/false);
213                 bufferFormatTestByCamera(params);
214             } finally {
215                 closeDevice(id);
216             }
217         }
218     }
219 
220     @Test
221     public void testRaw() throws Exception {
222         for (String id : mCameraIdsUnderTest) {
223             try {
224                 Log.v(TAG, "Testing raw capture for camera " + id);
225                 openDevice(id);
226                 BufferFormatTestParam params = new BufferFormatTestParam(
227                         ImageFormat.RAW_SENSOR, /*repeating*/false);
228                 bufferFormatTestByCamera(params);
229             } finally {
230                 closeDevice(id);
231             }
232         }
233     }
234 
235     @Test
236     public void testRawPrivate() throws Exception {
237         for (String id : mCameraIdsUnderTest) {
238             try {
239                 Log.v(TAG, "Testing raw private capture for camera " + id);
240                 openDevice(id);
241                 BufferFormatTestParam params = new BufferFormatTestParam(
242                         ImageFormat.RAW_PRIVATE, /*repeating*/false);
243                 bufferFormatTestByCamera(params);
244             } finally {
245                 closeDevice(id);
246             }
247         }
248     }
249 
250     @Test
251     public void testP010() throws Exception {
252         for (String id : mCameraIdsUnderTest) {
253             try {
254                 Log.v(TAG, "Testing YUV P010 capture for Camera " + id);
255                 openDevice(id);
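                // YCBCR_P010 is only exercised on devices that advertise the 10-bit dynamic range capability.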
256                 if (!mStaticInfo.isCapabilitySupported(CameraCharacteristics.
257                             REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT)) {
258                     continue;
259                 }
260                 Set<Long> availableProfiles =
261                     mStaticInfo.getAvailableDynamicRangeProfilesChecked();
262                 assertFalse("Absent dynamic range profiles", availableProfiles.isEmpty());
263                 assertTrue("HLG10 not present in the available dynamic range profiles",
264                         availableProfiles.contains(DynamicRangeProfiles.HLG10));
265 
266                 BufferFormatTestParam params = new BufferFormatTestParam(
267                         ImageFormat.YCBCR_P010, /*repeating*/false);
268                 params.mDynamicRangeProfile = DynamicRangeProfiles.HLG10;
269                 bufferFormatTestByCamera(params);
270             } finally {
271                 closeDevice(id);
272             }
273         }
274     }
275 
276     @Test
277     public void testDisplayP3Yuv() throws Exception {
278         for (String id : mCameraIdsUnderTest) {
279             try {
280                 if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
281                             .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
282                     continue;
283                 }
284                 Set<ColorSpace.Named> availableColorSpaces =
285                         mAllStaticInfo.get(id).getAvailableColorSpacesChecked(
286                                 ImageFormat.YUV_420_888);
287 
288                 if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
289                     continue;
290                 }
291 
292                 openDevice(id);
293                 Log.v(TAG, "Testing Display P3 Yuv capture for Camera " + id);
294                 BufferFormatTestParam params = new BufferFormatTestParam(
295                         ImageFormat.YUV_420_888, /*repeating*/false);
296                 params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
297                 params.mUseColorSpace = true;
298                 bufferFormatTestByCamera(params);
299             } finally {
300                 closeDevice(id);
301             }
302         }
303     }
304 
305     @Test
306     public void testDisplayP3YuvRepeating() throws Exception {
307         for (String id : mCameraIdsUnderTest) {
308             try {
309                 if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
310                             .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
311                     continue;
312                 }
313                 Set<ColorSpace.Named> availableColorSpaces =
314                         mAllStaticInfo.get(id).getAvailableColorSpacesChecked(
315                                 ImageFormat.YUV_420_888);
316 
317                 if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
318                     continue;
319                 }
320 
321                 openDevice(id);
322                 Log.v(TAG, "Testing repeating Display P3 Yuv capture for Camera " + id);
323                 BufferFormatTestParam params = new BufferFormatTestParam(
324                         ImageFormat.YUV_420_888, /*repeating*/true);
325                 params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
326                 params.mUseColorSpace = true;
327                 bufferFormatTestByCamera(params);
328             } finally {
329                 closeDevice(id);
330             }
331         }
332     }
333 
334     @Test
335     public void testDisplayP3Heic() throws Exception {
336         for (String id : mCameraIdsUnderTest) {
337             try {
338                 if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
339                             .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
340                     continue;
341                 }
342                 Set<ColorSpace.Named> availableColorSpaces =
343                         mAllStaticInfo.get(id).getAvailableColorSpacesChecked(ImageFormat.HEIC);
344 
345                 if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
346                     continue;
347                 }
348 
349                 openDevice(id);
350                 Log.v(TAG, "Testing Display P3 HEIC capture for Camera " + id);
351                 BufferFormatTestParam params = new BufferFormatTestParam(
352                         ImageFormat.HEIC, /*repeating*/false);
353                 params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
354                 params.mUseColorSpace = true;
355                 bufferFormatTestByCamera(params);
356             } finally {
357                 closeDevice(id);
358             }
359         }
360     }
361 
362     @Test
363     public void testDisplayP3HeicRepeating() throws Exception {
364         for (String id : mCameraIdsUnderTest) {
365             try {
366                 if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
367                             .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
368                     continue;
369                 }
370                 Set<ColorSpace.Named> availableColorSpaces =
371                         mAllStaticInfo.get(id).getAvailableColorSpacesChecked(ImageFormat.HEIC);
372 
373                 if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
374                     continue;
375                 }
376 
377                 openDevice(id);
378                 Log.v(TAG, "Testing repeating Display P3 HEIC capture for Camera " + id);
379                 BufferFormatTestParam params = new BufferFormatTestParam(
380                         ImageFormat.HEIC, /*repeating*/true);
381                 params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
382                 params.mUseColorSpace = true;
383                 bufferFormatTestByCamera(params);
384             } finally {
385                 closeDevice(id);
386             }
387         }
388     }
389 
390     @Test
391     public void testDisplayP3Jpeg() throws Exception {
392         for (String id : mCameraIdsUnderTest) {
393             try {
394                 if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
395                             .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
396                     continue;
397                 }
398                 Set<ColorSpace.Named> availableColorSpaces =
399                         mAllStaticInfo.get(id).getAvailableColorSpacesChecked(ImageFormat.JPEG);
400 
401                 if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
402                     continue;
403                 }
404 
405                 openDevice(id);
406                 Log.v(TAG, "Testing Display P3 JPEG capture for Camera " + id);
407                 BufferFormatTestParam params = new BufferFormatTestParam(
408                         ImageFormat.JPEG, /*repeating*/false);
409                 params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
410                 params.mUseColorSpace = true;
411                 bufferFormatTestByCamera(params);
412             } finally {
413                 closeDevice(id);
414             }
415         }
416     }
417 
418     @Test
419     public void testDisplayP3JpegRepeating() throws Exception {
420         for (String id : mCameraIdsUnderTest) {
421             try {
422                 if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
423                             .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
424                     continue;
425                 }
426                 Set<ColorSpace.Named> availableColorSpaces =
427                         mAllStaticInfo.get(id).getAvailableColorSpacesChecked(ImageFormat.JPEG);
428 
429                 if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
430                     continue;
431                 }
432 
433                 openDevice(id);
434                 Log.v(TAG, "Testing repeating Display P3 JPEG capture for Camera " + id);
435                 BufferFormatTestParam params = new BufferFormatTestParam(
436                         ImageFormat.JPEG, /*repeating*/true);
437                 params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
438                 params.mUseColorSpace = true;
439                 bufferFormatTestByCamera(params);
440             } finally {
441                 closeDevice(id);
442             }
443         }
444     }
445 
446     @Test
447     public void testSRGBJpeg() throws Exception {
448         for (String id : mCameraIdsUnderTest) {
449             try {
450                 if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
451                             .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
452                     continue;
453                 }
454                 Set<ColorSpace.Named> availableColorSpaces =
455                         mAllStaticInfo.get(id).getAvailableColorSpacesChecked(ImageFormat.JPEG);
456 
457                 if (!availableColorSpaces.contains(ColorSpace.Named.SRGB)) {
458                     continue;
459                 }
460 
461                 openDevice(id);
462                 Log.v(TAG, "Testing sRGB JPEG capture for Camera " + id);
463                 BufferFormatTestParam params = new BufferFormatTestParam(
464                         ImageFormat.JPEG, /*repeating*/false);
465                 params.mColorSpace = ColorSpace.Named.SRGB;
466                 params.mUseColorSpace = true;
467                 bufferFormatTestByCamera(params);
468             } finally {
469                 closeDevice(id);
470             }
471         }
472     }
473 
474     @Test
475     public void testSRGBJpegRepeating() throws Exception {
476         for (String id : mCameraIdsUnderTest) {
477             try {
478                 if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
479                             .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
480                     continue;
481                 }
482                 Set<ColorSpace.Named> availableColorSpaces =
483                         mAllStaticInfo.get(id).getAvailableColorSpacesChecked(ImageFormat.JPEG);
484 
485                 if (!availableColorSpaces.contains(ColorSpace.Named.SRGB)) {
486                     continue;
487                 }
488 
489                 openDevice(id);
490                 Log.v(TAG, "Testing repeating sRGB JPEG capture for Camera " + id);
491                 BufferFormatTestParam params = new BufferFormatTestParam(
492                         ImageFormat.JPEG, /*repeating*/true);
493                 params.mColorSpace = ColorSpace.Named.SRGB;
494                 params.mUseColorSpace = true;
495                 bufferFormatTestByCamera(params);
496             } finally {
497                 closeDevice(id);
498             }
499         }
500     }
501 
502     @Test
503     public void testJpegR() throws Exception {
504         for (String id : mCameraIdsUnderTest) {
505             try {
506                 if (!mAllStaticInfo.get(id).isJpegRSupported()) {
507                     Log.i(TAG, "Camera " + id + " does not support Jpeg/R, skipping");
508                     continue;
509                 }
510                 Log.v(TAG, "Testing Jpeg/R capture for Camera " + id);
511 
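                // Jpeg/R support implies that the 10-bit dynamic range capability must also be advertised.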
512                 assertTrue(mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
513                         .REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT));
514 
515                 openDevice(id);
516                 BufferFormatTestParam params = new BufferFormatTestParam(
517                         ImageFormat.JPEG_R, /*repeating*/false);
518                 bufferFormatTestByCamera(params);
519             } finally {
520                 closeDevice(id);
521             }
522         }
523     }
524 
525     @Test
526     public void testJpegRDisplayP3() throws Exception {
527         for (String id : mCameraIdsUnderTest) {
528             try {
529                 if (!mAllStaticInfo.get(id).isJpegRSupported()) {
530                     Log.i(TAG, "Camera " + id + " does not support Jpeg/R, skipping");
531                     continue;
532                 }
533 
534                 if (!mAllStaticInfo.get(id).isCapabilitySupported(CameraCharacteristics
535                         .REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)) {
536                     continue;
537                 }
538                 Set<ColorSpace.Named> availableColorSpaces =
539                         mAllStaticInfo.get(id).getAvailableColorSpacesChecked(
540                                 ImageFormat.JPEG_R);
541 
542                 if (!availableColorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
543                     continue;
544                 }
545                 openDevice(id);
546                 Log.v(TAG, "Testing Display P3 Jpeg/R capture for Camera " + id);
547                 BufferFormatTestParam params = new BufferFormatTestParam(
548                         ImageFormat.JPEG_R, /*repeating*/false);
549                 params.mColorSpace = ColorSpace.Named.DISPLAY_P3;
550                 params.mUseColorSpace = true;
551                 params.mDynamicRangeProfile = DynamicRangeProfiles.HLG10;
552                 bufferFormatTestByCamera(params);
553             } finally {
554                 closeDevice(id);
555             }
556         }
557     }
558 
559     @Test
560     public void testHeic() throws Exception {
561         for (String id : mCameraIdsUnderTest) {
562             try {
563                 Log.v(TAG, "Testing heic capture for Camera " + id);
564                 openDevice(id);
565                 BufferFormatTestParam params = new BufferFormatTestParam(
566                         ImageFormat.HEIC, /*repeating*/false);
567                 bufferFormatTestByCamera(params);
568             } finally {
569                 closeDevice(id);
570             }
571         }
572     }
573 
574     @Test
575     public void testRepeatingJpeg() throws Exception {
576         for (String id : mCameraIdsUnderTest) {
577             try {
578                 Log.v(TAG, "Testing repeating jpeg capture for Camera " + id);
579                 openDevice(id);
580                 BufferFormatTestParam params = new BufferFormatTestParam(
581                         ImageFormat.JPEG, /*repeating*/true);
582                 bufferFormatTestByCamera(params);
583             } finally {
584                 closeDevice(id);
585             }
586         }
587     }
588 
589     @Test
590     public void testRepeatingRaw() throws Exception {
591         for (String id : mCameraIdsUnderTest) {
592             try {
593                 Log.v(TAG, "Testing repeating raw capture for camera " + id);
594                 openDevice(id);
595                 BufferFormatTestParam params = new BufferFormatTestParam(
596                         ImageFormat.RAW_SENSOR, /*repeating*/true);
597                 bufferFormatTestByCamera(params);
598             } finally {
599                 closeDevice(id);
600             }
601         }
602     }
603 
604     @Test
605     public void testRepeatingRawPrivate() throws Exception {
606         for (String id : mCameraIdsUnderTest) {
607             try {
608                 Log.v(TAG, "Testing repeating raw private capture for camera " + id);
609                 openDevice(id);
610                 BufferFormatTestParam params = new BufferFormatTestParam(
611                         ImageFormat.RAW_PRIVATE, /*repeating*/true);
612                 bufferFormatTestByCamera(params);
613             } finally {
614                 closeDevice(id);
615             }
616         }
617     }
618 
619     @Test
620     public void testRepeatingHeic() throws Exception {
621         for (String id : mCameraIdsUnderTest) {
622             try {
623                 Log.v(TAG, "Testing repeating heic capture for Camera " + id);
624                 openDevice(id);
625                 BufferFormatTestParam params = new BufferFormatTestParam(
626                         ImageFormat.HEIC, /*repeating*/true);
627                 bufferFormatTestByCamera(params);
628             } finally {
629                 closeDevice(id);
630             }
631         }
632     }
633 
634     @Test
635     public void testFlexibleYuvWithTimestampBase() throws Exception {
636         for (String id : mCameraIdsUnderTest) {
637             try {
638                 Log.i(TAG, "Testing Camera " + id);
639                 openDevice(id);
640 
641                 BufferFormatTestParam params = new BufferFormatTestParam(
642                         ImageFormat.YUV_420_888, /*repeating*/true);
643                 params.mValidateImageData = false;
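                // Pixel data validation is skipped; this test only exercises the different timestamp bases below.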
644                 int[] timeBases = {OutputConfiguration.TIMESTAMP_BASE_SENSOR,
645                         OutputConfiguration.TIMESTAMP_BASE_MONOTONIC,
646                         OutputConfiguration.TIMESTAMP_BASE_REALTIME,
647                         OutputConfiguration.TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED};
648                 for (int timeBase : timeBases) {
649                     params.mTimestampBase = timeBase;
650                     bufferFormatTestByCamera(params);
651                 }
652             } finally {
653                 closeDevice(id);
654             }
655         }
656     }
657 
658     @Test
659     public void testLongProcessingRepeatingRaw() throws Exception {
660         for (String id : mCameraIdsUnderTest) {
661             try {
662                 Log.v(TAG, "Testing long processing on repeating raw for camera " + id);
663 
664                 if (!mAllStaticInfo.get(id).isCapabilitySupported(
665                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
666                     continue;
667                 }
668                 openDevice(id);
669 
670                 bufferFormatLongProcessingTimeTestByCamera(ImageFormat.RAW_SENSOR);
671             } finally {
672                 closeDevice(id);
673             }
674         }
675     }
676 
677     @Test
678     public void testLongProcessingRepeatingFlexibleYuv() throws Exception {
679         for (String id : mCameraIdsUnderTest) {
680             try {
681                 Log.v(TAG, "Testing long processing on repeating YUV for camera " + id);
682 
683                 if (!mAllStaticInfo.get(id).isCapabilitySupported(
684                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
685                     continue;
686                 }
687 
688                 openDevice(id);
689                 bufferFormatLongProcessingTimeTestByCamera(ImageFormat.YUV_420_888);
690             } finally {
691                 closeDevice(id);
692             }
693         }
694     }
695 
696     /**
697      * Test invalid access of an image after it has been closed; any further access
698      * of the image should throw an IllegalStateException. The basic assumption of
699      * this test is that the ImageReader always provides a direct byte buffer, which is
700      * always true for the camera case. If the produced image byte buffer is not a direct
701      * byte buffer, there is no guarantee that this invalid access will result in an ISE.
702      */
703     @Test
704     public void testInvalidAccessTest() throws Exception {
705         // Test byte buffer access after an image is released, it should throw ISE.
706         for (String id : mCameraIdsUnderTest) {
707             try {
708                 Log.v(TAG, "Testing invalid image access for Camera " + id);
709                 openDevice(id);
710                 invalidAccessTestAfterClose();
711             } finally {
712                 closeDevice(id);
713                 closeDefaultImageReader();
714             }
715         }
716     }
717 
718     /**
719      * Test capture of two image streams (YUV_420_888 and JPEG) using ImageReader.
720      *
721      * <p>Both stream formats are mandatory for the Camera2 API.</p>
722      */
723     @Test
724     public void testYuvAndJpeg() throws Exception {
725         for (String id : mCameraIdsUnderTest) {
726             try {
727                 Log.v(TAG, "YUV and JPEG testing for camera " + id);
728                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
729                     Log.i(TAG, "Camera " + id +
730                             " does not support color outputs, skipping");
731                     continue;
732                 }
733                 openDevice(id);
734                 bufferFormatWithYuvTestByCamera(ImageFormat.JPEG);
735             } finally {
736                 closeDevice(id);
737             }
738         }
739     }
740 
741     /**
742      * Test capture of two image streams (YUV_420_888 and JPEG) using an ImageReader created
743      * with the factory method that takes a usage flag argument.
744      *
745      * <p>Both stream formats are mandatory for the Camera2 API.</p>
746      */
747     @Test
748     public void testYuvAndJpegWithUsageFlag() throws Exception {
749         for (String id : mCameraIdsUnderTest) {
750             try {
751                 Log.v(TAG, "YUV and JPEG testing for camera " + id);
752                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
753                     Log.i(TAG, "Camera " + id +
754                             " does not support color outputs, skipping");
755                     continue;
756                 }
757                 openDevice(id);
758                 bufferFormatWithYuvTestByCamera(ImageFormat.JPEG, true);
759             } finally {
760                 closeDevice(id);
761             }
762         }
763     }
764 
765     @Test
766     public void testImageReaderBuilderSetHardwareBufferFormatAndDataSpace() throws Exception {
767         long usage = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE | HardwareBuffer.USAGE_GPU_COLOR_OUTPUT;
768         try (
769             ImageReader reader = new ImageReader
770                 .Builder(20, 45)
771                 .setMaxImages(2)
772                 .setDefaultHardwareBufferFormat(HardwareBuffer.RGBA_8888)
773                 .setDefaultDataSpace(DataSpace.DATASPACE_BT709)
774                 .setUsage(usage)
775                 .build();
776             ImageWriter writer = ImageWriter.newInstance(reader.getSurface(), 1);
777             Image outputImage = writer.dequeueInputImage()
778         ) {
779             assertEquals(2, reader.getMaxImages());
780             assertEquals(usage, reader.getUsage());
781             assertEquals(HardwareBuffer.RGBA_8888, reader.getHardwareBufferFormat());
782 
783             assertEquals(20, outputImage.getWidth());
784             assertEquals(45, outputImage.getHeight());
785             assertEquals(HardwareBuffer.RGBA_8888, outputImage.getFormat());
786         }
787     }
788 
789     @Test
790     public void testImageReaderBuilderWithBLOBAndHEIF() throws Exception {
791         long usage = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE | HardwareBuffer.USAGE_GPU_COLOR_OUTPUT;
792         try (
793             ImageReader reader = new ImageReader
794                 .Builder(20, 45)
795                 .setMaxImages(2)
796                 .setDefaultHardwareBufferFormat(HardwareBuffer.BLOB)
797                 .setDefaultDataSpace(DataSpace.DATASPACE_HEIF)
798                 .setUsage(usage)
799                 .build();
800             ImageWriter writer = new ImageWriter.Builder(reader.getSurface()).build();
801         ) {
802             assertEquals(2, reader.getMaxImages());
803             assertEquals(usage, reader.getUsage());
804             assertEquals(HardwareBuffer.BLOB, reader.getHardwareBufferFormat());
805             assertEquals(DataSpace.DATASPACE_HEIF, reader.getDataSpace());
806             // writer should have same dataspace/hardwarebuffer format as reader.
807             assertEquals(HardwareBuffer.BLOB, writer.getHardwareBufferFormat());
808             assertEquals(DataSpace.DATASPACE_HEIF, writer.getDataSpace());
809             // HEIC is the combination of HardwareBuffer.BLOB and DataSpace.DATASPACE_HEIF
810             assertEquals(ImageFormat.HEIC, writer.getFormat());
811         }
812     }
813 
814     @Test
815     public void testImageReaderBuilderWithBLOBAndJpegR() throws Exception {
816         long usage = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE | HardwareBuffer.USAGE_GPU_COLOR_OUTPUT;
817         try (
818                 ImageReader reader = new ImageReader
819                         .Builder(20, 45)
820                         .setMaxImages(2)
821                         .setDefaultHardwareBufferFormat(HardwareBuffer.BLOB)
822                         .setDefaultDataSpace(DataSpace.DATASPACE_JPEG_R)
823                         .setUsage(usage)
824                         .build();
825                 ImageWriter writer = new ImageWriter.Builder(reader.getSurface()).build();
826         ) {
827             assertEquals(2, reader.getMaxImages());
828             assertEquals(usage, reader.getUsage());
829             assertEquals(HardwareBuffer.BLOB, reader.getHardwareBufferFormat());
830             assertEquals(DataSpace.DATASPACE_JPEG_R, reader.getDataSpace());
831             // writer should have same dataspace/hardwarebuffer format as reader.
832             assertEquals(HardwareBuffer.BLOB, writer.getHardwareBufferFormat());
833             assertEquals(DataSpace.DATASPACE_JPEG_R, writer.getDataSpace());
834             // Jpeg/R is the combination of HardwareBuffer.BLOB and DataSpace.DATASPACE_JPEG_R
835             assertEquals(ImageFormat.JPEG_R, writer.getFormat());
836         }
837     }
838 
839     @Test
840     public void testImageReaderBuilderWithBLOBAndJFIF() throws Exception {
841         long usage = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE | HardwareBuffer.USAGE_GPU_COLOR_OUTPUT;
842         try (
843             ImageReader reader = new ImageReader
844                 .Builder(20, 45)
845                 .setMaxImages(2)
846                 .setDefaultHardwareBufferFormat(HardwareBuffer.BLOB)
847                 .setDefaultDataSpace(DataSpace.DATASPACE_JFIF)
848                 .setUsage(usage)
849                 .build();
850             ImageWriter writer = new ImageWriter.Builder(reader.getSurface()).build();
851         ) {
852             assertEquals(2, reader.getMaxImages());
853             assertEquals(usage, reader.getUsage());
854             assertEquals(HardwareBuffer.BLOB, reader.getHardwareBufferFormat());
855             assertEquals(DataSpace.DATASPACE_JFIF, reader.getDataSpace());
856             // writer should have same dataspace/hardwarebuffer format as reader.
857             assertEquals(HardwareBuffer.BLOB, writer.getHardwareBufferFormat());
858             assertEquals(DataSpace.DATASPACE_JFIF, writer.getDataSpace());
859             // JPEG is the combination of HardwareBuffer.BLOB and DataSpace.DATASPACE_JFIF
860             assertEquals(ImageFormat.JPEG, writer.getFormat());
861         }
862     }
863 
864     @Test
865     public void testImageReaderBuilderImageFormatOverride() throws Exception {
866         try (
867             ImageReader reader = new ImageReader
868                 .Builder(20, 45)
869                 .setImageFormat(ImageFormat.HEIC)
870                 .setDefaultHardwareBufferFormat(HardwareBuffer.RGB_888)
871                 .setDefaultDataSpace(DataSpace.DATASPACE_BT709)
872                 .build();
873             ImageWriter writer = ImageWriter.newInstance(reader.getSurface(), 1);
874             Image outputImage = writer.dequeueInputImage()
875         ) {
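            // The later setDefaultHardwareBufferFormat()/setDefaultDataSpace() calls are expected
            // to take precedence over the earlier setImageFormat(ImageFormat.HEIC) call.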
876             assertEquals(1, reader.getMaxImages());
877             assertEquals(HardwareBuffer.USAGE_CPU_READ_OFTEN, reader.getUsage());
878             assertEquals(HardwareBuffer.RGB_888, reader.getHardwareBufferFormat());
879             assertEquals(DataSpace.DATASPACE_BT709, reader.getDataSpace());
880 
881             assertEquals(20, outputImage.getWidth());
882             assertEquals(45, outputImage.getHeight());
883             assertEquals(HardwareBuffer.RGB_888, outputImage.getFormat());
884         }
885     }
886 
887     @Test
888     public void testImageReaderBuilderSetImageFormat() throws Exception {
889         try (
890             ImageReader reader = new ImageReader
891                 .Builder(20, 45)
892                 .setMaxImages(2)
893                 .setImageFormat(ImageFormat.YUV_420_888)
894                 .build();
895             ImageWriter writer = ImageWriter.newInstance(reader.getSurface(), 1);
896             Image outputImage = writer.dequeueInputImage()
897         ) {
898             assertEquals(2, reader.getMaxImages());
899             assertEquals(ImageFormat.YUV_420_888, reader.getImageFormat());
900             assertEquals(HardwareBuffer.USAGE_CPU_READ_OFTEN, reader.getUsage());
901             // ImageFormat.YUV_420_888 hal dataspace is DATASPACE_JFIF
902             assertEquals(DataSpace.DATASPACE_JFIF, reader.getDataSpace());
903 
904             // writer should retrieve all info from reader's surface
905             assertEquals(DataSpace.DATASPACE_JFIF, writer.getDataSpace());
906             assertEquals(HardwareBuffer.YCBCR_420_888, writer.getHardwareBufferFormat());
907 
908             assertEquals(20, outputImage.getWidth());
909             assertEquals(45, outputImage.getHeight());
910             assertEquals(ImageFormat.YUV_420_888, outputImage.getFormat());
911         }
912     }
913 
914     /**
915      * Test capture of two image streams (YUV_420_888 and RAW_SENSOR) using ImageReader.
916      *
917      */
918     @Test
919     public void testImageReaderYuvAndRaw() throws Exception {
920         for (String id : mCameraIdsUnderTest) {
921             try {
922                 Log.v(TAG, "YUV and RAW testing for camera " + id);
923                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
924                     Log.i(TAG, "Camera " + id +
925                             " does not support color outputs, skipping");
926                     continue;
927                 }
928                 openDevice(id);
929                 bufferFormatWithYuvTestByCamera(ImageFormat.RAW_SENSOR);
930             } finally {
931                 closeDevice(id);
932             }
933         }
934     }
935 
936     /**
937      * If the camera device advertises the SECURE_IMAGE_DATA capability, test
938      * ImageFormat.PRIVATE + PROTECTED usage capture using an ImageReader created with the
939      * factory method that takes a usage flag argument, passing a custom usage flag.
940      */
941     @Test
942     public void testImageReaderPrivateWithProtectedUsageFlag() throws Exception {
943         for (String id : mCameraIdsUnderTest) {
944             try {
945                 Log.v(TAG, "Private format and protected usage testing for camera " + id);
946                 List<String> testCameraIds = new ArrayList<>();
947 
948                 if (mAllStaticInfo.get(id).isCapabilitySupported(
949                         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA)) {
950                     // Test the camera id without using physical camera
951                     testCameraIds.add(null);
952                 }
953 
954                 if (mAllStaticInfo.get(id).isLogicalMultiCamera()) {
955                     Set<String> physicalIdsSet =
956                         mAllStaticInfo.get(id).getCharacteristics().getPhysicalCameraIds();
957                     for (String physicalId : physicalIdsSet) {
958                         if (mAllStaticInfo.get(physicalId).isCapabilitySupported(
959                                 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA)) {
960                             testCameraIds.add(physicalId);
961                         }
962                     }
963                 }
964 
965                 if (testCameraIds.isEmpty()) {
966                     Log.i(TAG, "Camera " + id +
967                             " does not support secure image data capability, skipping");
968                     continue;
969                 }
970                 openDevice(id);
971 
972 
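                // Protected PRIVATE buffers are not CPU-accessible, so image data validation is
                // skipped; only session configuration and buffer delivery are verified.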
973                 BufferFormatTestParam params = new BufferFormatTestParam(
974                         ImageFormat.PRIVATE, /*repeating*/true);
975                 params.mSetUsageFlag = true;
976                 params.mUsageFlag = HardwareBuffer.USAGE_PROTECTED_CONTENT;
977                 params.mRepeating = true;
978                 params.mCheckSession = true;
979                 params.mValidateImageData = false;
980                 for (String testCameraId : testCameraIds) {
981                     params.mPhysicalId = testCameraId;
982                     bufferFormatTestByCamera(params);
983                 }
984             } finally {
985                 closeDevice(id);
986             }
987         }
988     }
989 
990     /**
991      * Test capture of two image streams (YUV_420_888 and RAW_SENSOR) using an ImageReader
992      * created with the factory method that takes a usage flag argument.
993      *
994      */
995     @Test
996     public void testImageReaderYuvAndRawWithUsageFlag() throws Exception {
997         for (String id : mCameraIdsUnderTest) {
998             try {
999                 Log.v(TAG, "YUV and RAW testing for camera " + id);
1000                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
1001                     Log.i(TAG, "Camera " + id +
1002                             " does not support color outputs, skipping");
1003                     continue;
1004                 }
1005                 openDevice(id);
1006                 bufferFormatWithYuvTestByCamera(ImageFormat.RAW_SENSOR, true);
1007             } finally {
1008                 closeDevice(id);
1009             }
1010         }
1011     }
1012 
1013     /**
1014      * Check that the center patches for YUV and JPEG outputs for the same frame match for each YUV
1015      * resolution and format supported.
1016      */
1017     @Test
1018     public void testAllOutputYUVResolutions() throws Exception {
1019         Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
1020                 BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
1021         for (String id : mCameraIdsUnderTest) {
1022             try {
1023                 Log.v(TAG, "Testing all YUV image resolutions for camera " + id);
1024 
1025                 if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
1026                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
1027                     continue;
1028                 }
1029 
1030                 openDevice(id);
1031                 // Warm up the preview only on LEGACY devices; other hardware levels skip the warmup captures.
1032                 int warmupCaptureNumber = (mStaticInfo.isHardwareLevelLegacy()) ?
1033                         MAX_NUM_IMAGES - 1 : 0;
1034 
1035                 // NV21 isn't supported by ImageReader.
1036                 final int[] YUVFormats = new int[] {ImageFormat.YUV_420_888, ImageFormat.YV12};
1037 
1038                 CameraCharacteristics.Key<StreamConfigurationMap> key =
1039                         CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP;
1040                 StreamConfigurationMap config = mStaticInfo.getValueFromKeyNonNull(key);
1041                 int[] supportedFormats = config.getOutputFormats();
1042                 List<Integer> supportedYUVFormats = new ArrayList<>();
1043                 for (int format : YUVFormats) {
1044                     if (CameraTestUtils.contains(supportedFormats, format)) {
1045                         supportedYUVFormats.add(format);
1046                     }
1047                 }
1048 
1049                 Size[] jpegSizes = mStaticInfo.getAvailableSizesForFormatChecked(ImageFormat.JPEG,
1050                         StaticMetadata.StreamDirection.Output);
1051                 assertFalse("JPEG output not supported for camera " + id +
1052                         ", at least one JPEG output is required.", jpegSizes.length == 0);
1053 
1054                 Size maxJpegSize = CameraTestUtils.getMaxSize(jpegSizes);
1055                 Size maxPreviewSize = mOrderedPreviewSizes.get(0);
1056                 Size QCIF = new Size(176, 144);
1057                 Size FULL_HD = new Size(1920, 1080);
1058                 for (int format : supportedYUVFormats) {
1059                     Size[] targetCaptureSizes =
1060                             mStaticInfo.getAvailableSizesForFormatChecked(format,
1061                             StaticMetadata.StreamDirection.Output);
1062 
1063                     for (Size captureSz : targetCaptureSizes) {
1064                         if (VERBOSE) {
1065                             Log.v(TAG, "Testing yuv size " + captureSz + " and jpeg size "
1066                                     + maxJpegSize + " for camera " + mCamera.getId());
1067                         }
1068 
1069                         ImageReader jpegReader = null;
1070                         ImageReader yuvReader = null;
1071                         try {
1072                             // Create YUV image reader
1073                             SimpleImageReaderListener yuvListener = new SimpleImageReaderListener();
1074                             yuvReader = createImageReader(captureSz, format, MAX_NUM_IMAGES,
1075                                     yuvListener);
1076                             Surface yuvSurface = yuvReader.getSurface();
1077 
1078                             // Create JPEG image reader
1079                             SimpleImageReaderListener jpegListener =
1080                                     new SimpleImageReaderListener();
1081                             jpegReader = createImageReader(maxJpegSize,
1082                                     ImageFormat.JPEG, MAX_NUM_IMAGES, jpegListener);
1083                             Surface jpegSurface = jpegReader.getSurface();
1084 
1085                             // Setup session
1086                             List<Surface> outputSurfaces = new ArrayList<Surface>();
1087                             outputSurfaces.add(yuvSurface);
1088                             outputSurfaces.add(jpegSurface);
1089                             createSession(outputSurfaces);
1090 
1091                             int state = mCameraSessionListener.getStateWaiter().waitForAnyOfStates(
1092                                         Arrays.asList(sessionStates),
1093                                         CameraTestUtils.SESSION_CONFIGURE_TIMEOUT_MS);
1094 
1095                             if (state == BlockingSessionCallback.SESSION_CONFIGURE_FAILED) {
1096                                 if (captureSz.getWidth() > maxPreviewSize.getWidth() ||
1097                                         captureSz.getHeight() > maxPreviewSize.getHeight()) {
1098                                     Log.v(TAG, "Skip testing {yuv:" + captureSz
1099                                             + " ,jpeg:" + maxJpegSize + "} for camera "
1100                                             + mCamera.getId() +
1101                                             " because full size jpeg + yuv larger than "
1102                                             + "max preview size (" + maxPreviewSize
1103                                             + ") is not supported");
1104                                     continue;
1105                                 } else if (captureSz.equals(QCIF) &&
1106                                         ((maxJpegSize.getWidth() > FULL_HD.getWidth()) ||
1107                                          (maxJpegSize.getHeight() > FULL_HD.getHeight()))) {
1108                                     Log.v(TAG, "Skip testing {yuv:" + captureSz
1109                                             + " ,jpeg:" + maxJpegSize + "} for camera "
1110                                             + mCamera.getId() +
1111                                             " because QCIF + >Full_HD size is not supported");
1112                                     continue;
1113                                 } else {
1114                                     fail("Camera " + mCamera.getId() +
1115                                             ":session configuration failed for {jpeg: " +
1116                                             maxJpegSize + ", yuv: " + captureSz + "}");
1117                                 }
1118                             }
1119 
1120                             // Warm up camera preview (mainly to give legacy devices time to do 3A).
1121                             CaptureRequest.Builder warmupRequest =
1122                                     mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1123                             warmupRequest.addTarget(yuvSurface);
1124                             assertNotNull("Fail to get CaptureRequest.Builder", warmupRequest);
1125                             SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1126 
1127                             for (int i = 0; i < warmupCaptureNumber; i++) {
1128                                 startCapture(warmupRequest.build(), /*repeating*/false,
1129                                         resultListener, mHandler);
1130                             }
1131                             for (int i = 0; i < warmupCaptureNumber; i++) {
1132                                 resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);
1133                                 Image image = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
1134                                 image.close();
1135                             }
1136 
1137                             // Capture image.
1138                             CaptureRequest.Builder mainRequest =
1139                                     mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
1140                             for (Surface s : outputSurfaces) {
1141                                 mainRequest.addTarget(s);
1142                             }
1143 
1144                             startCapture(mainRequest.build(), /*repeating*/false, resultListener,
1145                                     mHandler);
1146 
1147                             // Verify capture result and images
1148                             resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);
1149 
1150                             Image yuvImage = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
1151                             Image jpegImage = jpegListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
1152 
1153                             //Validate captured images.
1154                             CameraTestUtils.validateImage(yuvImage, captureSz.getWidth(),
1155                                     captureSz.getHeight(), format, /*filePath*/null);
1156                             CameraTestUtils.validateImage(jpegImage, maxJpegSize.getWidth(),
1157                                     maxJpegSize.getHeight(), ImageFormat.JPEG, /*filePath*/null);
1158 
1159                             // Compare the image centers.
1160                             RectF jpegDimens = new RectF(0, 0, jpegImage.getWidth(),
1161                                     jpegImage.getHeight());
1162                             RectF yuvDimens = new RectF(0, 0, yuvImage.getWidth(),
1163                                     yuvImage.getHeight());
1164 
1165                             // Find scale difference between YUV and JPEG output
1166                             Matrix m = new Matrix();
1167                             m.setRectToRect(yuvDimens, jpegDimens, Matrix.ScaleToFit.START);
1168                             RectF scaledYuv = new RectF();
1169                             m.mapRect(scaledYuv, yuvDimens);
1170                             float scale = scaledYuv.width() / yuvDimens.width();
1171 
1172                             final int PATCH_DIMEN = 40; // pixels in YUV
1173 
1174                             // Find matching square patch of pixels in YUV and JPEG output
1175                             RectF tempPatch = new RectF(0, 0, PATCH_DIMEN, PATCH_DIMEN);
1176                             tempPatch.offset(yuvDimens.centerX() - tempPatch.centerX(),
1177                                     yuvDimens.centerY() - tempPatch.centerY());
1178                             Rect yuvPatch = new Rect();
1179                             tempPatch.roundOut(yuvPatch);
1180 
1181                             tempPatch.set(0, 0, PATCH_DIMEN * scale, PATCH_DIMEN * scale);
1182                             tempPatch.offset(jpegDimens.centerX() - tempPatch.centerX(),
1183                                     jpegDimens.centerY() - tempPatch.centerY());
1184                             Rect jpegPatch = new Rect();
1185                             tempPatch.roundOut(jpegPatch);
1186 
1187                             // Decode center patches
1188                             int[] yuvColors = convertPixelYuvToRgba(yuvPatch.width(),
1189                                     yuvPatch.height(), yuvPatch.left, yuvPatch.top, yuvImage);
1190                             Bitmap yuvBmap = Bitmap.createBitmap(yuvColors, yuvPatch.width(),
1191                                     yuvPatch.height(), Bitmap.Config.ARGB_8888);
1192 
1193                             byte[] compressedJpegData = CameraTestUtils.getDataFromImage(jpegImage);
1194                             BitmapRegionDecoder decoder = BitmapRegionDecoder.newInstance(
1195                                     compressedJpegData, /*offset*/0, compressedJpegData.length,
1196                                     /*isShareable*/true);
1197                             BitmapFactory.Options opt = new BitmapFactory.Options();
1198                             opt.inPreferredConfig = Bitmap.Config.ARGB_8888;
1199                             Bitmap fullSizeJpegBmap = decoder.decodeRegion(jpegPatch, opt);
1200                             Bitmap jpegBmap = Bitmap.createScaledBitmap(fullSizeJpegBmap,
1201                                     yuvPatch.width(), yuvPatch.height(), /*filter*/true);
1202 
1203                             // Compare two patches using average of per-pixel differences
1204                             double difference = BitmapUtils.calcDifferenceMetric(yuvBmap, jpegBmap);
1205                             double tolerance = IMAGE_DIFFERENCE_TOLERANCE;
1206                             if (mStaticInfo.isHardwareLevelLegacy()) {
1207                                 tolerance = IMAGE_DIFFERENCE_TOLERANCE_LEGACY;
1208                             }
1209                             Log.i(TAG, "Difference for resolution " + captureSz + " is: " +
1210                                     difference);
1211                             if (difference > tolerance) {
1212                                 // Dump files if debug mode is enabled
1213                                 if (DEBUG) {
1214                                     String jpegFileName = mDebugFileNameBase + "/" + captureSz +
1215                                             "_jpeg.jpg";
1216                                     dumpFile(jpegFileName, jpegBmap);
1217                                     String fullSizeJpegFileName = mDebugFileNameBase + "/" +
1218                                             captureSz + "_full_jpeg.jpg";
1219                                     dumpFile(fullSizeJpegFileName, compressedJpegData);
1220                                     String yuvFileName = mDebugFileNameBase + "/" + captureSz +
1221                                             "_yuv.jpg";
1222                                     dumpFile(yuvFileName, yuvBmap);
1223                                     String fullSizeYuvFileName = mDebugFileNameBase + "/" +
1224                                             captureSz + "_full_yuv.jpg";
1225                                     int[] fullYUVColors = convertPixelYuvToRgba(yuvImage.getWidth(),
1226                                             yuvImage.getHeight(), 0, 0, yuvImage);
1227                                     Bitmap fullYUVBmap = Bitmap.createBitmap(fullYUVColors,
1228                                             yuvImage.getWidth(), yuvImage.getHeight(),
1229                                             Bitmap.Config.ARGB_8888);
1230                                     dumpFile(fullSizeYuvFileName, fullYUVBmap);
1231                                 }
1232                                 fail("Camera " + mCamera.getId() + ": YUV image at capture size "
1233                                         + captureSz + " and JPEG image at capture size "
1234                                         + maxJpegSize + " for the same frame are not similar,"
1235                                         + " center patches have difference metric of "
1236                                         + difference + ", tolerance is " + tolerance);
1237                             }
1238 
1239                             // Stop capture, delete the streams.
1240                             stopCapture(/*fast*/false);
1241                             yuvImage.close();
1242                             jpegImage.close();
1243                             yuvListener.drain();
1244                             jpegListener.drain();
1245                         } finally {
1246                             closeImageReader(jpegReader);
1247                             jpegReader = null;
1248                             closeImageReader(yuvReader);
1249                             yuvReader = null;
1250                         }
1251                     }
1252                 }
1253 
1254             } finally {
1255                 closeDevice(id);
1256             }
1257         }
1258     }
1259 
1260     /**
1261      * Test that images captured after discarding free buffers are valid.
1262      */
1263     @Test
1264     public void testDiscardFreeBuffers() throws Exception {
1265         for (String id : mCameraIdsUnderTest) {
1266             try {
1267                 Log.v(TAG, "Testing discardFreeBuffers for Camera " + id);
1268                 openDevice(id);
1269                 discardFreeBuffersTestByCamera();
1270             } finally {
1271                 closeDevice(id);
1272             }
1273         }
1274     }
1275 
1276     /** Tests that usage bits are preserved */
1277     @Test
1278     public void testUsageRespected() throws Exception {
1279         final long REQUESTED_USAGE_BITS =
1280                 HardwareBuffer.USAGE_GPU_COLOR_OUTPUT | HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE;
1281         ImageReader reader = ImageReader.newInstance(1, 1, PixelFormat.RGBA_8888, 1,
1282                 REQUESTED_USAGE_BITS);
1283         Surface surface = reader.getSurface();
1284         Canvas canvas = surface.lockHardwareCanvas();
1285         canvas.drawColor(Color.RED);
1286         surface.unlockCanvasAndPost(canvas);
1287         Image image = null;
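        // The hardware canvas renders asynchronously, so poll briefly until the buffer
        // reaches the reader.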
1288         for (int i = 0; i < 100; i++) {
1289             image = reader.acquireNextImage();
1290             if (image != null) break;
1291             Thread.sleep(10);
1292         }
1293         assertNotNull(image);
1294         HardwareBuffer buffer = image.getHardwareBuffer();
1295         assertNotNull(buffer);
1296         // Mask off the upper vendor bits
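        // (keep only the lower 28 bits so allocator-added vendor usage flags do not affect
        // the comparison)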
1297         int myBits = (int) (buffer.getUsage() & 0xFFFFFFF);
1298         assertWithMessage("Usage bits %s did not contain requested usage bits %s", myBits,
1299                 REQUESTED_USAGE_BITS).that(myBits & REQUESTED_USAGE_BITS)
1300                         .isEqualTo(REQUESTED_USAGE_BITS);
1301     }
1302 
1303     private void testLandscapeToPortraitOverride(boolean overrideToPortrait) throws Exception {
1304         if (!SystemProperties.getBoolean(CameraManager.LANDSCAPE_TO_PORTRAIT_PROP, false)) {
1305             Log.i(TAG, "Landscape to portrait override not supported, skipping test");
1306             return;
1307         }
1308 
1309         for (String id : mCameraIdsUnderTest) {
1310             CameraCharacteristics c = mCameraManager.getCameraCharacteristics(
1311                     id, /*overrideToPortrait*/false);
1312             int[] modes = c.get(CameraCharacteristics.SCALER_AVAILABLE_ROTATE_AND_CROP_MODES);
1313             boolean supportsRotateAndCrop = false;
1314             for (int mode : modes) {
1315                 if (mode == CameraMetadata.SCALER_ROTATE_AND_CROP_90
1316                         || mode == CameraMetadata.SCALER_ROTATE_AND_CROP_270) {
1317                     supportsRotateAndCrop = true;
1318                     break;
1319                 }
1320             }
1321 
1322             if (!supportsRotateAndCrop) {
1323                 Log.i(TAG, "Skipping non-rotate-and-crop cameraId " + id);
1324                 continue;
1325             }
1326 
1327             int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
1328             if (sensorOrientation != 0 && sensorOrientation != 180) {
1329                 Log.i(TAG, "Skipping portrait orientation sensor cameraId " + id);
1330                 continue;
1331             }
1332 
1333             Log.i(TAG, "Testing overrideToPortrait " + overrideToPortrait
1334                     + " for Camera " + id);
1335 
1336             if (overrideToPortrait) {
1337                 c = mCameraManager.getCameraCharacteristics(id, overrideToPortrait);
1338                 sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
1339                 assertTrue("SENSOR_ORIENTATION should imply portrait sensor.",
1340                         sensorOrientation == 90 || sensorOrientation == 270);
1341             }
1342 
1343             BufferFormatTestParam params = new BufferFormatTestParam(
1344                     ImageFormat.JPEG, /*repeating*/false);
1345             params.mValidateImageData = true;
1346 
1347             try {
1348                 openDevice(id, overrideToPortrait);
1349                 bufferFormatTestByCamera(params);
1350             } finally {
1351                 closeDevice(id);
1352             }
1353         }
1354     }
1355 
1356     @Test
1357     public void testLandscapeToPortraitOverrideEnabled() throws Exception {
1358         testLandscapeToPortraitOverride(true);
1359     }
1360 
1361     @Test
1362     public void testLandscapeToPortraitOverrideDisabled() throws Exception {
1363         testLandscapeToPortraitOverride(false);
1364     }
1365 
1366     /**
1367      * Convert a rectangular patch in a YUV image to an ARGB color array.
1368      *
1369      * @param w width of the patch.
1370      * @param h height of the patch.
1371      * @param wOffset offset of the left side of the patch.
1372      * @param hOffset offset of the top of the patch.
1373      * @param yuvImage a YUV image to select a patch from.
1374      * @return the image patch converted to RGB as an ARGB color array.
1375      */
1376     private static int[] convertPixelYuvToRgba(int w, int h, int wOffset, int hOffset,
1377                                                Image yuvImage) {
1378         final int CHANNELS = 3; // yuv
1379         final float COLOR_RANGE = 255f;
1380 
1381         assertTrue("Invalid argument to convertPixelYuvToRgba",
1382                 w > 0 && h > 0 && wOffset >= 0 && hOffset >= 0);
1383         assertNotNull(yuvImage);
1384 
1385         int imageFormat = yuvImage.getFormat();
1386         assertTrue("YUV image must have YUV-type format",
1387                 imageFormat == ImageFormat.YUV_420_888 || imageFormat == ImageFormat.YV12 ||
1388                         imageFormat == ImageFormat.NV21);
1389 
1390         int height = yuvImage.getHeight();
1391         int width = yuvImage.getWidth();
1392 
1393         Rect imageBounds = new Rect(/*left*/0, /*top*/0, /*right*/width, /*bottom*/height);
1394         Rect crop = new Rect(/*left*/wOffset, /*top*/hOffset, /*right*/wOffset + w,
1395                 /*bottom*/hOffset + h);
1396         assertTrue("Output rectangle " + crop + " must lie within image bounds " + imageBounds,
1397                 imageBounds.contains(crop));
1398         Image.Plane[] planes = yuvImage.getPlanes();
1399 
1400         Image.Plane yPlane = planes[0];
1401         Image.Plane cbPlane = planes[1];
1402         Image.Plane crPlane = planes[2];
1403 
1404         ByteBuffer yBuf = yPlane.getBuffer();
1405         int yPixStride = yPlane.getPixelStride();
1406         int yRowStride = yPlane.getRowStride();
1407         ByteBuffer cbBuf = cbPlane.getBuffer();
1408         int cbPixStride = cbPlane.getPixelStride();
1409         int cbRowStride = cbPlane.getRowStride();
1410         ByteBuffer crBuf = crPlane.getBuffer();
1411         int crPixStride = crPlane.getPixelStride();
1412         int crRowStride = crPlane.getRowStride();
1413 
1414         int[] output = new int[w * h];
1415 
1416         // TODO: Optimize this with renderscript intrinsics
1417         byte[] yRow = new byte[yPixStride * (w - 1) + 1];
1418         byte[] cbRow = new byte[cbPixStride * (w / 2 - 1) + 1];
1419         byte[] crRow = new byte[crPixStride * (w / 2 - 1) + 1];
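        // The row buffers above hold one cropped row of samples each; the chroma planes are
        // half horizontal resolution, so only w / 2 samples are needed per chroma row.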
1420         yBuf.mark();
1421         cbBuf.mark();
1422         crBuf.mark();
1423         int initialYPos = yBuf.position();
1424         int initialCbPos = cbBuf.position();
1425         int initialCrPos = crBuf.position();
1426         int outputPos = 0;
1427         for (int i = hOffset; i < hOffset + h; i++) {
1428             yBuf.position(initialYPos + i * yRowStride + wOffset * yPixStride);
1429             yBuf.get(yRow);
1430             if ((i & 1) == (hOffset & 1)) {
1431                 cbBuf.position(initialCbPos + (i / 2) * cbRowStride + wOffset * cbPixStride / 2);
1432                 cbBuf.get(cbRow);
1433                 crBuf.position(initialCrPos + (i / 2) * crRowStride + wOffset * crPixStride / 2);
1434                 crBuf.get(crRow);
1435             }
1436             for (int j = 0, yPix = 0, crPix = 0, cbPix = 0; j < w; j++, yPix += yPixStride) {
1437                 float y = yRow[yPix] & 0xFF;
1438                 float cb = cbRow[cbPix] & 0xFF;
1439                 float cr = crRow[crPix] & 0xFF;
1440 
1441                 // convert YUV -> RGB (from JFIF's "Conversion to and from RGB" section)
1442                 int r = (int) Math.max(0.0f, Math.min(COLOR_RANGE, y + 1.402f * (cr - 128)));
1443                 int g = (int) Math.max(0.0f,
1444                         Math.min(COLOR_RANGE, y - 0.34414f * (cb - 128) - 0.71414f * (cr - 128)));
1445                 int b = (int) Math.max(0.0f, Math.min(COLOR_RANGE, y + 1.772f * (cb - 128)));
1446 
1447                 // Convert to ARGB pixel color (use opaque alpha)
1448                 output[outputPos++] = Color.rgb(r, g, b);
1449 
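                // 4:2:0 subsampling: one chroma sample covers two adjacent luma pixels, so
                // only advance the chroma indices after every second column.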
1450                 if ((j & 1) == 1) {
1451                     crPix += crPixStride;
1452                     cbPix += cbPixStride;
1453                 }
1454             }
1455         }
1456         yBuf.rewind();
1457         cbBuf.rewind();
1458         crBuf.rewind();
1459 
1460         return output;
1461     }
1462 
1463     /**
1464      * Test capturing a stream of the given format simultaneously with a YUV stream.
1465      *
1466      * <p>Uses a fixed YUV size and varies the capture size of the targeted format. Single captures only.</p>
1467      *
1468      * @param format The capture format to be tested alongside the YUV format.
1469      */
1470     private void bufferFormatWithYuvTestByCamera(int format) throws Exception {
1471         bufferFormatWithYuvTestByCamera(format, false);
1472     }
1473 
1474     /**
1475      * Test capturing a stream of the given format simultaneously with a YUV stream.
1476      *
1477      * <p>Uses a fixed YUV size and varies the capture size of the targeted format. Single captures only.</p>
1478      *
1479      * @param format The capture format to be tested alongside the YUV format.
1480      * @param setUsageFlag Whether the ImageReaders are created with an explicit usage flag
1481      *                     (selects the ImageReader factory overload used)
1482      */
1483     private void bufferFormatWithYuvTestByCamera(int format, boolean setUsageFlag)
1484             throws Exception {
1485         if (format != ImageFormat.JPEG && format != ImageFormat.RAW_SENSOR
1486                 && format != ImageFormat.YUV_420_888) {
1487             throw new IllegalArgumentException("Unsupported format: " + format);
1488         }
1489 
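        // Submit one fewer capture than the readers' maxImages so their queues cannot
        // overflow before the images are drained and closed below.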
1490         final int NUM_SINGLE_CAPTURE_TESTED = MAX_NUM_IMAGES - 1;
1491         Size maxYuvSz = mOrderedPreviewSizes.get(0);
1492         Size[] targetCaptureSizes = mStaticInfo.getAvailableSizesForFormatChecked(format,
1493                 StaticMetadata.StreamDirection.Output);
1494 
1495         for (Size captureSz : targetCaptureSizes) {
1496             if (VERBOSE) {
1497                 Log.v(TAG, "Testing yuv size " + maxYuvSz.toString() + " and capture size "
1498                         + captureSz.toString() + " for camera " + mCamera.getId());
1499             }
1500 
1501             ImageReader captureReader = null;
1502             ImageReader yuvReader = null;
1503             try {
1504                 // Create YUV image reader
1505                 SimpleImageReaderListener yuvListener  = new SimpleImageReaderListener();
1506                 if (setUsageFlag) {
1507                     yuvReader = createImageReader(maxYuvSz, ImageFormat.YUV_420_888, MAX_NUM_IMAGES,
1508                             HardwareBuffer.USAGE_CPU_READ_OFTEN, yuvListener);
1509                 } else {
1510                     yuvReader = createImageReader(maxYuvSz, ImageFormat.YUV_420_888, MAX_NUM_IMAGES,
1511                             yuvListener);
1512                 }
1513 
1514                 Surface yuvSurface = yuvReader.getSurface();
1515 
1516                 // Create capture image reader
1517                 SimpleImageReaderListener captureListener = new SimpleImageReaderListener();
1518                 if (setUsageFlag) {
1519                     captureReader = createImageReader(captureSz, format, MAX_NUM_IMAGES,
1520                             HardwareBuffer.USAGE_CPU_READ_OFTEN, captureListener);
1521                 } else {
1522                     captureReader = createImageReader(captureSz, format, MAX_NUM_IMAGES,
1523                             captureListener);
1524                 }
1525                 Surface captureSurface = captureReader.getSurface();
1526 
1527                 // Capture images.
1528                 List<Surface> outputSurfaces = new ArrayList<Surface>();
1529                 outputSurfaces.add(yuvSurface);
1530                 outputSurfaces.add(captureSurface);
1531                 CaptureRequest.Builder request = prepareCaptureRequestForSurfaces(outputSurfaces,
1532                         CameraDevice.TEMPLATE_PREVIEW);
1533                 SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
1534 
1535                 for (int i = 0; i < NUM_SINGLE_CAPTURE_TESTED; i++) {
1536                     startCapture(request.build(), /*repeating*/false, resultListener, mHandler);
1537                 }
1538 
1539                 // Verify capture result and images
1540                 for (int i = 0; i < NUM_SINGLE_CAPTURE_TESTED; i++) {
1541                     resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);
1542                     if (VERBOSE) {
1543                         Log.v(TAG, " Got the capture result back for " + i + "th capture");
1544                     }
1545 
1546                     Image yuvImage = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
1547                     if (VERBOSE) {
1548                         Log.v(TAG, " Got the yuv image back for " + i + "th capture");
1549                     }
1550 
1551                     Image captureImage = captureListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
1552                     if (VERBOSE) {
1553                         Log.v(TAG, " Got the capture image back for " + i + "th capture");
1554                     }
1555 
1556                     // Validate captured images.
1557                     CameraTestUtils.validateImage(yuvImage, maxYuvSz.getWidth(),
1558                             maxYuvSz.getHeight(), ImageFormat.YUV_420_888, /*filePath*/null);
1559                     CameraTestUtils.validateImage(captureImage, captureSz.getWidth(),
1560                             captureSz.getHeight(), format, /*filePath*/null);
1561                     yuvImage.close();
1562                     captureImage.close();
1563                 }
1564 
1565                 // Stop capture, delete the streams.
1566                 stopCapture(/*fast*/false);
1567             } finally {
1568                 closeImageReader(captureReader);
1569                 captureReader = null;
1570                 closeImageReader(yuvReader);
1571                 yuvReader = null;
1572             }
1573         }
1574     }
1575 
1576     private void invalidAccessTestAfterClose() throws Exception {
1577         final int FORMAT = mStaticInfo.isColorOutputSupported() ?
1578             ImageFormat.YUV_420_888 : ImageFormat.DEPTH16;
1579 
1580         Size[] availableSizes = mStaticInfo.getAvailableSizesForFormatChecked(FORMAT,
1581                 StaticMetadata.StreamDirection.Output);
1582         Image img = null;
1583         // Create ImageReader.
1584         mListener = new SimpleImageListener();
1585         createDefaultImageReader(availableSizes[0], FORMAT, MAX_NUM_IMAGES, mListener);
1586 
1587         // Start capture.
1588         CaptureRequest request = prepareCaptureRequest();
1589         SimpleCaptureCallback listener = new SimpleCaptureCallback();
1590         startCapture(request, /* repeating */false, listener, mHandler);
1591 
1592         mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
1593         img = mReader.acquireNextImage();
1594         Plane firstPlane = img.getPlanes()[0];
1595         ByteBuffer buffer = firstPlane.getBuffer();
1596         img.close();
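        // The plane and buffer references were captured before close(); the helper below
        // verifies that accessing the closed image through them is rejected.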
1597 
1598         imageInvalidAccessTestAfterClose(img, firstPlane, buffer);
1599     }
1600 
1601     /**
1602      * Test that images captured after discarding free buffers are valid.
1603      */
1604     private void discardFreeBuffersTestByCamera() throws Exception {
1605         final int FORMAT = mStaticInfo.isColorOutputSupported() ?
1606             ImageFormat.YUV_420_888 : ImageFormat.DEPTH16;
1607 
1608         final Size SIZE = mStaticInfo.getAvailableSizesForFormatChecked(FORMAT,
1609                 StaticMetadata.StreamDirection.Output)[0];
1610         // Create ImageReader.
1611         mListener = new SimpleImageListener();
1612         createDefaultImageReader(SIZE, FORMAT, MAX_NUM_IMAGES, mListener);
1613 
1614         // Start capture.
1615         final boolean REPEATING = true;
1616         final boolean SINGLE = false;
1617         CaptureRequest request = prepareCaptureRequest();
1618         SimpleCaptureCallback listener = new SimpleCaptureCallback();
1619         startCapture(request, REPEATING, listener, mHandler);
1620 
1621         // Validate images and capture results.
1622         validateImage(SIZE, FORMAT, NUM_FRAME_VERIFIED, REPEATING, /*colorSpace*/ null);
1623         validateCaptureResult(FORMAT, SIZE, listener, NUM_FRAME_VERIFIED);
1624 
1625         // Discard free buffers.
1626         mReader.discardFreeBuffers();
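        // Freed buffers are re-allocated on demand, so the repeating capture should keep
        // producing valid images and results.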
1627 
1628         // Validate images and capture results again.
1629         validateImage(SIZE, FORMAT, NUM_FRAME_VERIFIED, REPEATING, /*colorSpace*/ null);
1630         validateCaptureResult(FORMAT, SIZE, listener, NUM_FRAME_VERIFIED);
1631 
1632         // Stop repeating request in preparation for discardFreeBuffers
1633         mCameraSession.stopRepeating();
1634         mCameraSessionListener.getStateWaiter().waitForState(
1635                 BlockingSessionCallback.SESSION_READY, SESSION_READY_TIMEOUT_MS);
1636 
1637         // Drain the reader queue and discard free buffers from the reader.
1638         Image img = mReader.acquireLatestImage();
1639         if (img != null) {
1640             img.close();
1641         }
1642         mReader.discardFreeBuffers();
1643 
1644         // Do a single capture for camera device to reallocate buffers
1645         mListener.reset();
1646         startCapture(request, SINGLE, listener, mHandler);
1647         validateImage(SIZE, FORMAT, /*captureCount*/ 1, SINGLE, /*colorSpace*/ null);
1648     }
1649 
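    /**
     * Parameter holder for {@link #bufferFormatTestByCamera}. Typical usage, mirroring the
     * landscape-to-portrait test above:
     *
     * <pre>{@code
     * BufferFormatTestParam params = new BufferFormatTestParam(ImageFormat.JPEG, false);
     * params.mValidateImageData = true;
     * bufferFormatTestByCamera(params);
     * }</pre>
     */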
1650     private class BufferFormatTestParam {
1651         public int mFormat;
1652         public boolean mRepeating;
1653         public boolean mSetUsageFlag = false;
1654         public long mUsageFlag = HardwareBuffer.USAGE_CPU_READ_OFTEN;
1655         public boolean mCheckSession = false;
1656         public boolean mValidateImageData = true;
1657         public String mPhysicalId = null;
1658         public long mDynamicRangeProfile = DynamicRangeProfiles.STANDARD;
1659         public ColorSpace.Named mColorSpace;
1660         public boolean mUseColorSpace = false;
1661         public int mTimestampBase = OutputConfiguration.TIMESTAMP_BASE_DEFAULT;
1662 
1663         BufferFormatTestParam(int format, boolean repeating) {
1664             mFormat = format;
1665             mRepeating = repeating;
1666         }
1667     };
1668 
1669     private void bufferFormatTestByCamera(BufferFormatTestParam params)
1670             throws Exception {
1671         int format = params.mFormat;
1672         boolean setUsageFlag = params.mSetUsageFlag;
1673         long usageFlag = params.mUsageFlag;
1674         boolean repeating = params.mRepeating;
1675         boolean validateImageData = params.mValidateImageData;
1676         int timestampBase = params.mTimestampBase;
1677 
1678         String physicalId = params.mPhysicalId;
1679         StaticMetadata staticInfo;
1680         if (physicalId == null) {
1681             staticInfo = mStaticInfo;
1682         } else {
1683             staticInfo = mAllStaticInfo.get(physicalId);
1684         }
1685 
1686         Size[] availableSizes = staticInfo.getAvailableSizesForFormatChecked(format,
1687                 StaticMetadata.StreamDirection.Output);
1688 
1689         boolean secureTest = setUsageFlag &&
1690                 ((usageFlag & HardwareBuffer.USAGE_PROTECTED_CONTENT) != 0);
1691         Size secureDataSize = null;
1692         if (secureTest) {
1693             secureDataSize = staticInfo.getCharacteristics().get(
1694                     CameraCharacteristics.SCALER_DEFAULT_SECURE_IMAGE_SIZE);
1695         }
1696 
1697         boolean validateTimestampBase = (timestampBase
1698                 != OutputConfiguration.TIMESTAMP_BASE_DEFAULT);
1699         Integer deviceTimestampSource = staticInfo.getCharacteristics().get(
1700                 CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
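        // The device timestamp source decides which timestamp bases are expected to match
        // the onCaptureStarted timestamps in validateTimestamps().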
1701         // for each resolution, test imageReader:
1702         for (Size sz : availableSizes) {
1703             try {
1704                 // For the secure mode test, only use the default secure data size if the HAL advertises one.
1705                 if (secureDataSize != null && !secureDataSize.equals(sz)) {
1706                     continue;
1707                 }
1708 
1709                 if (VERBOSE) {
1710                     Log.v(TAG, "Testing size " + sz.toString() + " format " + format
1711                             + " for camera " + mCamera.getId());
1712                 }
1713 
1714                 // Create ImageReader.
1715                 mListener  = new SimpleImageListener();
1716                 if (setUsageFlag) {
1717                     createDefaultImageReader(sz, format, MAX_NUM_IMAGES, usageFlag, mListener);
1718                 } else {
1719                     createDefaultImageReader(sz, format, MAX_NUM_IMAGES, mListener);
1720                 }
1721 
1722                 // Don't queue up images if we won't validate them
1723                 if (!validateImageData && !validateTimestampBase) {
1724                     ImageDropperListener imageDropperListener = new ImageDropperListener();
1725                     mReader.setOnImageAvailableListener(imageDropperListener, mHandler);
1726                 }
1727 
1728                 if (params.mCheckSession) {
1729                     checkImageReaderSessionConfiguration(
1730                             "Camera capture session validation for format: " + format + " failed",
1731                             physicalId);
1732                 }
1733 
1734                 ArrayList<OutputConfiguration> outputConfigs = new ArrayList<>();
1735                 OutputConfiguration config = new OutputConfiguration(mReader.getSurface());
1736                 assertTrue("Default timestamp base must be DEFAULT",
1737                         config.getTimestampBase() == OutputConfiguration.TIMESTAMP_BASE_DEFAULT);
1738                 assertTrue("Default mirroring mode must be AUTO",
1739                         config.getMirrorMode() == OutputConfiguration.MIRROR_MODE_AUTO);
1740                 if (physicalId != null) {
1741                     config.setPhysicalCameraId(physicalId);
1742                 }
1743                 config.setDynamicRangeProfile(params.mDynamicRangeProfile);
1744                 config.setTimestampBase(params.mTimestampBase);
1745                 outputConfigs.add(config);
1746 
1747                 CaptureRequest request;
1748                 if (params.mUseColorSpace) {
1749                     request = prepareCaptureRequestForColorSpace(
1750                         outputConfigs, CameraDevice.TEMPLATE_PREVIEW, params.mColorSpace)
1751                         .build();
1752                 } else {
1753                     request = prepareCaptureRequestForConfigs(
1754                         outputConfigs, CameraDevice.TEMPLATE_PREVIEW).build();
1755                 }
1756 
1757                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
1758                 startCapture(request, repeating, listener, mHandler);
1759 
1760                 int numFrameVerified = repeating ? NUM_FRAME_VERIFIED : 1;
1761 
1762                 if (validateTimestampBase) {
1763                     validateTimestamps(deviceTimestampSource, timestampBase, numFrameVerified,
1764                             listener, repeating);
1765                 }
1766 
1767                 if (validateImageData) {
1768                     // Validate images.
1769                     ColorSpace colorSpace = null;
1770                     if (params.mUseColorSpace) {
1771                         colorSpace = ColorSpace.get(params.mColorSpace);
1772                     }
1773                     validateImage(sz, format, numFrameVerified, repeating, colorSpace);
1774                 }
1775 
1776                 // Validate capture result.
1777                 validateCaptureResult(format, sz, listener, numFrameVerified);
1778 
1779                 // stop capture.
1780                 stopCapture(/*fast*/false);
1781             } finally {
1782                 closeDefaultImageReader();
1783             }
1784 
1785             // Only test one size for non-default timestamp base.
1786             if (timestampBase != OutputConfiguration.TIMESTAMP_BASE_DEFAULT) break;
1787         }
1788     }
1789 
1790     private void bufferFormatLongProcessingTimeTestByCamera(int format)
1791             throws Exception {
1792 
1793         final int TEST_SENSITIVITY_VALUE = mStaticInfo.getSensitivityClampToRange(204);
1794         final long TEST_EXPOSURE_TIME_NS = mStaticInfo.getExposureClampToRange(28000000);
1795         final long EXPOSURE_TIME_ERROR_MARGIN_NS = 100000;
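        // Manual exposure and sensitivity are fixed so each capture result can be checked
        // against the requested values while the image is deliberately held open for a long time.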
1796 
1797         Size[] availableSizes = mStaticInfo.getAvailableSizesForFormatChecked(format,
1798                 StaticMetadata.StreamDirection.Output);
1799 
1800         // for each resolution, test imageReader:
1801         for (Size sz : availableSizes) {
1802             Log.v(TAG, "testing size " + sz.toString());
1803             try {
1804                 if (VERBOSE) {
1805                     Log.v(TAG, "Testing long processing time: size " + sz.toString() + " format " +
1806                             format + " for camera " + mCamera.getId());
1807                 }
1808 
1809                 // Create ImageReader.
1810                 mListener  = new SimpleImageListener();
1811                 createDefaultImageReader(sz, format, MAX_NUM_IMAGES, mListener);
1812 
1813                 // Setting manual controls
1814                 List<Surface> outputSurfaces = new ArrayList<Surface>();
1815                 outputSurfaces.add(mReader.getSurface());
1816                 CaptureRequest.Builder requestBuilder = prepareCaptureRequestForSurfaces(
1817                         outputSurfaces, CameraDevice.TEMPLATE_STILL_CAPTURE);
1818 
1819                 requestBuilder.set(
1820                         CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_OFF);
1821                 requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
1822                 requestBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
1823                 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
1824                         CaptureRequest.CONTROL_AE_MODE_OFF);
1825                 requestBuilder.set(CaptureRequest.CONTROL_AWB_MODE,
1826                         CaptureRequest.CONTROL_AWB_MODE_OFF);
1827                 requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, TEST_SENSITIVITY_VALUE);
1828                 requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, TEST_EXPOSURE_TIME_NS);
1829 
1830                 SimpleCaptureCallback listener = new SimpleCaptureCallback();
1831                 startCapture(requestBuilder.build(), /*repeating*/true, listener, mHandler);
1832 
1833                 for (int i = 0; i < NUM_LONG_PROCESS_TIME_FRAME_VERIFIED; i++) {
1834                     mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
1835 
1836                     // Verify image.
1837                     Image img = mReader.acquireNextImage();
1838                     assertNotNull("Unable to acquire next image", img);
1839                     CameraTestUtils.validateImage(img, sz.getWidth(), sz.getHeight(), format,
1840                             mDebugFileNameBase);
1841 
1842                     // Verify the exposure time and iso match the requested values.
1843                     CaptureResult result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
1844 
1845                     long exposureTimeDiff = TEST_EXPOSURE_TIME_NS -
1846                             getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
1847                     int sensitivityDiff = TEST_SENSITIVITY_VALUE -
1848                             getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);
1849 
1850                     mCollector.expectTrue(
1851                             String.format("Long processing frame %d format %d size %s " +
1852                                     "exposure time was %d expecting %d.", i, format, sz.toString(),
1853                                     getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME),
1854                                     TEST_EXPOSURE_TIME_NS),
1855                             exposureTimeDiff < EXPOSURE_TIME_ERROR_MARGIN_NS &&
1856                             exposureTimeDiff >= 0);
1857 
1858                     mCollector.expectTrue(
1859                             String.format("Long processing frame %d format %d size %s " +
1860                                     "sensitivity was %d expecting %d.", i, format, sz.toString(),
1861                                     getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY),
1862                                     TEST_SENSITIVITY_VALUE),
1863                             sensitivityDiff >= 0);
1864 
1865 
1866                     // Sleep to simulate long processing before closing the image.
1867                     Thread.sleep(LONG_PROCESS_TIME_MS);
1868                     img.close();
1869                 }
1870                 // Stop capture.
1871                 // Drain the reader queue in case the full queue blocks
1872                 // HAL from delivering new results
1873                 ImageDropperListener imageDropperListener = new ImageDropperListener();
1874                 mReader.setOnImageAvailableListener(imageDropperListener, mHandler);
1875                 Image img = mReader.acquireLatestImage();
1876                 if (img != null) {
1877                     img.close();
1878                 }
1879                 stopCapture(/*fast*/true);
1880             } finally {
1881                 closeDefaultImageReader();
1882             }
1883         }
1884     }
1885 
1886     /**
1887      * Validate capture results.
1888      *
1889      * @param format The format of this capture.
1890      * @param size The capture size.
1891      * @param listener The capture listener to get capture result callbacks.
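     * @param numFrameVerified The number of capture results to verify.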
1892      */
1893     private void validateCaptureResult(int format, Size size, SimpleCaptureCallback listener,
1894             int numFrameVerified) {
1895         for (int i = 0; i < numFrameVerified; i++) {
1896             CaptureResult result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
1897 
1898             // TODO: Update this to use availableResultKeys once shim supports this.
1899             if (mStaticInfo.isCapabilitySupported(
1900                     CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS)) {
1901                 StaticMetadata staticInfo = mStaticInfo;
1902                 boolean supportActivePhysicalIdConsistency =
1903                         PropertyUtil.getFirstApiLevel() >= Build.VERSION_CODES.S;
1904                 if (mStaticInfo.isLogicalMultiCamera() && supportActivePhysicalIdConsistency
1905                         && mStaticInfo.isActivePhysicalCameraIdSupported()) {
1906                     String activePhysicalId =
1907                             result.get(CaptureResult.LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID);
1908                     staticInfo = mAllStaticInfo.get(activePhysicalId);
1909                 }
1910 
1911                 Long exposureTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
1912                 Integer sensitivity = getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);
1913                 mCollector.expectInRange(
1914                         String.format(
1915                                 "Capture for format %d, size %s exposure time is invalid.",
1916                                 format, size.toString()),
1917                         exposureTime,
1918                         staticInfo.getExposureMinimumOrDefault(),
1919                         staticInfo.getExposureMaximumOrDefault()
1920                 );
1921                 mCollector.expectInRange(
1922                         String.format("Capture for format %d, size %s sensitivity is invalid.",
1923                                 format, size.toString()),
1924                         sensitivity,
1925                         staticInfo.getSensitivityMinimumOrDefault(),
1926                         staticInfo.getSensitivityMaximumOrDefault()
1927                 );
1928             }
1929             // TODO: add more key validations.
1930         }
1931     }
1932 
1933     private final class SimpleImageListener implements ImageReader.OnImageAvailableListener {
1934         private final ConditionVariable imageAvailable = new ConditionVariable();
1935         @Override
1936         public void onImageAvailable(ImageReader reader) {
1937             if (mReader != reader) {
1938                 return;
1939             }
1940 
1941             if (VERBOSE) Log.v(TAG, "new image available");
1942             imageAvailable.open();
1943         }
1944 
1945         public void waitForAnyImageAvailable(long timeout) {
1946             if (imageAvailable.block(timeout)) {
1947                 imageAvailable.close();
1948             } else {
1949                 fail("wait for image available timed out after " + timeout + "ms");
1950             }
1951         }
1952 
1953         public void closePendingImages() {
1954             Image image = mReader.acquireLatestImage();
1955             if (image != null) {
1956                 image.close();
1957             }
1958         }
1959 
1960         public void reset() {
1961             imageAvailable.close();
1962         }
1963     }
1964 
1965     private void validateImage(Size sz, int format, int captureCount, boolean repeating,
1966             ColorSpace colorSpace) throws Exception {
1967         // TODO: Add more format here, and wrap each one as a function.
1968         Image img;
1969         final int MAX_RETRY_COUNT = 20;
1970         int numImageVerified = 0;
1971         int reTryCount = 0;
1972         while (numImageVerified < captureCount) {
1973             assertNotNull("Image listener is null", mListener);
1974             if (VERBOSE) Log.v(TAG, "Waiting for an Image");
1975             mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
1976             if (repeating) {
1977                 /**
1978                  * Acquire the latest image in case the validation is slower than
1979                  * the image producing rate.
1980                  */
1981                 img = mReader.acquireLatestImage();
1982                 /**
1983                  * Sometimes if multiple onImageAvailable callbacks being queued,
1984                  * acquireLatestImage will clear all buffer before corresponding callback is
1985                  * executed. Wait for a new frame in that case.
1986                  */
1987                 if (img == null && reTryCount < MAX_RETRY_COUNT) {
1988                     reTryCount++;
1989                     continue;
1990                 }
1991             } else {
1992                 img = mReader.acquireNextImage();
1993             }
1994             assertNotNull("Unable to acquire the latest image", img);
1995             if (VERBOSE) Log.v(TAG, "Got the latest image");
1996             CameraTestUtils.validateImage(img, sz.getWidth(), sz.getHeight(), format,
1997                     mDebugFileNameBase, colorSpace);
1998             HardwareBuffer hwb = img.getHardwareBuffer();
1999             assertNotNull("Unable to retrieve the Image's HardwareBuffer", hwb);
2000             if (format == ImageFormat.DEPTH_JPEG) {
2001                 byte [] dynamicDepthBuffer = CameraTestUtils.getDataFromImage(img);
2002                 assertTrue("Dynamic depth validation failed!",
2003                         validateDynamicDepthNative(dynamicDepthBuffer));
2004             }
2005             if (VERBOSE) Log.v(TAG, "finish validation of image " + numImageVerified);
2006             img.close();
2007             numImageVerified++;
2008             reTryCount = 0;
2009         }
2010 
2011         // Return all pending images to the ImageReader as the validateImage may
2012         // take a while to return and there could be many images pending.
2013         mListener.closePendingImages();
2014     }
2015 
2016     private void validateTimestamps(Integer deviceTimestampSource, int timestampBase,
2017             int captureCount, SimpleCaptureCallback listener, boolean repeating) throws Exception {
2018         Image img;
2019         final int MAX_RETRY_COUNT = 20;
2020         int numImageVerified = 0;
2021         int retryCount = 0;
2022         List<Long> imageTimestamps = new ArrayList<Long>();
2023         assertNotNull("Image listener is null", mListener);
2024         while (numImageVerified < captureCount) {
2025             if (VERBOSE) Log.v(TAG, "Waiting for an Image");
2026             mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
2027             if (repeating) {
2028                 img = mReader.acquireLatestImage();
2029                 if (img == null && retryCount < MAX_RETRY_COUNT) {
2030                     retryCount++;
2031                     continue;
2032                 }
2033             } else {
2034                 img = mReader.acquireNextImage();
2035             }
2036             assertNotNull("Unable to acquire the latest image", img);
2037             if (VERBOSE) {
2038                 Log.v(TAG, "Got the latest image with timestamp " + img.getTimestamp());
2039             }
2040             imageTimestamps.add(img.getTimestamp());
2041             img.close();
2042             numImageVerified++;
2043             retryCount = 0;
2044         }
2045 
2046         List<Long> captureStartTimestamps = listener.getCaptureStartTimestamps(captureCount);
2047         if (VERBOSE) {
2048             Log.v(TAG, "deviceTimestampSource: " + deviceTimestampSource
2049                     + ", timestampBase: " + timestampBase + ", captureStartTimestamps: "
2050                     + captureStartTimestamps + ", imageTimestamps: " + imageTimestamps);
2051         }
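        // When the configured timestamp base is the same clock the device timestamps frames
        // in, image timestamps must match the onCaptureStarted timestamps exactly.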
2052         if (timestampBase == OutputConfiguration.TIMESTAMP_BASE_SENSOR
2053                 || (timestampBase == OutputConfiguration.TIMESTAMP_BASE_MONOTONIC
2054                 && deviceTimestampSource == CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN)
2055                 || (timestampBase == OutputConfiguration.TIMESTAMP_BASE_REALTIME
2056                 && deviceTimestampSource == CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME)) {
2057             // Makes sure image timestamps match capture started timestamp
2058             for (Long timestamp : imageTimestamps) {
2059                 mCollector.expectTrue("Image timestamp " + timestamp
2060                         + " should match one of onCaptureStarted timestamps "
2061                         + captureStartTimestamps,
2062                         captureStartTimestamps.contains(timestamp));
2063             }
2064         } else if (timestampBase == OutputConfiguration.TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED) {
2065             // Make sure that timestamp base is MONOTONIC. Do not strictly check against
2066             // choreographer callback because there are cases camera framework doesn't use
2067             // choreographer timestamp (when consumer is slower than camera for example).
2068             final int TIMESTAMP_THRESHOLD_MILLIS = 3000; // 3 seconds
2069             long monotonicTime = SystemClock.uptimeMillis();
2070             for (Long timestamp : imageTimestamps) {
2071                 long timestampMs = TimeUnit.NANOSECONDS.toMillis(timestamp);
2072                 mCollector.expectTrue("Image timestamp " + timestampMs + " ms should be in the "
2073                         + "same timebase as SystemClock.uptimeMillis " + monotonicTime
2074                         + " ms when timestamp base is set to CHOREOGRAPHER synced",
2075                         Math.abs(timestampMs - monotonicTime) < TIMESTAMP_THRESHOLD_MILLIS);
2076             }
2077         }
2078 
2079         // Return all pending images to the ImageReader as the validateImage may
2080         // take a while to return and there could be many images pending.
2081         mListener.closePendingImages();
2082     }
2083 
2084     /** Load dynamic depth validation jni on initialization */
2085     static {
2086         System.loadLibrary("ctscamera2_jni");
2087     }
2088     /**
2089      * Use the dynamic depth SDK to validate a dynamic depth file stored in the buffer.
2090      *
2091      * Returns false if the dynamic depth has validation errors. Validation warnings/errors
2092      * will be printed to logcat.
2093      */
2094     private static native boolean validateDynamicDepthNative(byte[] dynamicDepthBuffer);
2095 }
2096