• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.params;
18 
19 import static com.android.internal.util.Preconditions.checkArrayElementsNotNull;
20 
21 import android.graphics.ImageFormat;
22 import android.graphics.PixelFormat;
23 import android.hardware.DataSpace;
24 import android.hardware.camera2.CameraCharacteristics;
25 import android.hardware.camera2.CameraDevice;
26 import android.hardware.camera2.CameraMetadata;
27 import android.hardware.camera2.CaptureRequest;
28 import android.hardware.camera2.utils.HashCodeHelpers;
29 import android.hardware.camera2.utils.SurfaceUtils;
30 import android.util.Range;
31 import android.util.Size;
32 import android.util.SparseIntArray;
33 import android.view.Surface;
34 
35 import com.android.internal.camera.flags.Flags;
36 
37 import java.util.Arrays;
38 import java.util.HashMap;
39 import java.util.Objects;
40 import java.util.Set;
41 
42 /**
43  * Immutable class to store the available stream
44  * {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP configurations} to set up
45  * {@link android.view.Surface Surfaces} for creating a
46  * {@link android.hardware.camera2.CameraCaptureSession capture session} with
47  * {@link android.hardware.camera2.CameraDevice#createCaptureSession(SessionConfiguration)}.
48  * <!-- TODO: link to input stream configuration -->
49  *
50  * <p>This is the authoritative list for all <!-- input/ -->output formats (and sizes respectively
51  * for that format) that are supported by a camera device.</p>
52  *
53  * <p>This also contains the minimum frame durations and stall durations for each format/size
54  * combination that can be used to calculate effective frame rate when submitting multiple captures.
55  * </p>
56  *
57  * <p>An instance of this object is available from {@link CameraCharacteristics} using
58  * the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP} key and the
59  * {@link CameraCharacteristics#get} method.</p>
60  *
61  * <pre><code>{@code
62  * CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
63  * StreamConfigurationMap configs = characteristics.get(
64  *         CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
65  * }</code></pre>
66  *
67  * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
68  * @see CameraDevice#createCaptureSession(SessionConfiguration)
69  */
70 public final class StreamConfigurationMap {
71 
72     private static final String TAG = "StreamConfigurationMap";
73 
74     private static final int MAX_DIMEN_FOR_ROUNDING = 1920; // maximum allowed width for rounding
75 
76     /**
77      * Create a new {@link StreamConfigurationMap}.
78      *
79      * <p>The array parameters ownership is passed to this object after creation; do not
80      * write to them after this constructor is invoked.</p>
81      *
82      * @param configurations a non-{@code null} array of {@link StreamConfiguration}
83      * @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration}
84      * @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
85      * @param depthConfigurations a non-{@code null} array of depth {@link StreamConfiguration}
86      * @param depthMinFrameDurations a non-{@code null} array of depth
87      *        {@link StreamConfigurationDuration}
88      * @param depthStallDurations a non-{@code null} array of depth
89      *        {@link StreamConfigurationDuration}
90      * @param dynamicDepthConfigurations a non-{@code null} array of dynamic depth
91      *        {@link StreamConfiguration}
92      * @param dynamicDepthMinFrameDurations a non-{@code null} array of dynamic depth
93      *        {@link StreamConfigurationDuration}
94      * @param dynamicDepthStallDurations a non-{@code null} array of dynamic depth
95      *        {@link StreamConfigurationDuration}
96      * @param heicConfigurations a non-{@code null} array of heic {@link StreamConfiguration}
97      * @param heicMinFrameDurations a non-{@code null} array of heic
98      *        {@link StreamConfigurationDuration}
99      * @param heicStallDurations a non-{@code null} array of heic
100      *        {@link StreamConfigurationDuration}
101      * @param jpegRConfigurations a non-{@code null} array of Jpeg/R {@link StreamConfiguration}
102      * @param jpegRMinFrameDurations a non-{@code null} array of Jpeg/R
103      *        {@link StreamConfigurationDuration}
104      * @param jpegRStallDurations a non-{@code null} array of Jpeg/R
105      *        {@link StreamConfigurationDuration}
106      * @param heicUltraHDRConfigurations a non-{@code null} array of Heic UltraHDR
107      *        {@link StreamConfiguration}
108      * @param heicUltraHDRMinFrameDurations a non-{@code null} array of Heic UltraHDR
109      *        {@link StreamConfigurationDuration}
110      * @param heicUltraHDRStallDurations a non-{@code null} array of Heic UltraHDR
111      *        {@link StreamConfigurationDuration}
112      * @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration}, null if
113      *        camera device does not support high speed video recording
114      * @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE
115      *        and thus needs a separate list of slow high-resolution output sizes
116      * @throws NullPointerException if any of the arguments except highSpeedVideoConfigurations
117      *         were {@code null} or any subelements were {@code null}
118      *
119      * @hide
120      */
StreamConfigurationMap( StreamConfiguration[] configurations, StreamConfigurationDuration[] minFrameDurations, StreamConfigurationDuration[] stallDurations, StreamConfiguration[] depthConfigurations, StreamConfigurationDuration[] depthMinFrameDurations, StreamConfigurationDuration[] depthStallDurations, StreamConfiguration[] dynamicDepthConfigurations, StreamConfigurationDuration[] dynamicDepthMinFrameDurations, StreamConfigurationDuration[] dynamicDepthStallDurations, StreamConfiguration[] heicConfigurations, StreamConfigurationDuration[] heicMinFrameDurations, StreamConfigurationDuration[] heicStallDurations, StreamConfiguration[] jpegRConfigurations, StreamConfigurationDuration[] jpegRMinFrameDurations, StreamConfigurationDuration[] jpegRStallDurations, StreamConfiguration[] heicUltraHDRConfigurations, StreamConfigurationDuration[] heicUltraHDRMinFrameDurations, StreamConfigurationDuration[] heicUltraHDRStallDurations, HighSpeedVideoConfiguration[] highSpeedVideoConfigurations, ReprocessFormatsMap inputOutputFormatsMap, boolean listHighResolution)121     public StreamConfigurationMap(
122             StreamConfiguration[] configurations,
123             StreamConfigurationDuration[] minFrameDurations,
124             StreamConfigurationDuration[] stallDurations,
125             StreamConfiguration[] depthConfigurations,
126             StreamConfigurationDuration[] depthMinFrameDurations,
127             StreamConfigurationDuration[] depthStallDurations,
128             StreamConfiguration[] dynamicDepthConfigurations,
129             StreamConfigurationDuration[] dynamicDepthMinFrameDurations,
130             StreamConfigurationDuration[] dynamicDepthStallDurations,
131             StreamConfiguration[] heicConfigurations,
132             StreamConfigurationDuration[] heicMinFrameDurations,
133             StreamConfigurationDuration[] heicStallDurations,
134             StreamConfiguration[] jpegRConfigurations,
135             StreamConfigurationDuration[] jpegRMinFrameDurations,
136             StreamConfigurationDuration[] jpegRStallDurations,
137             StreamConfiguration[] heicUltraHDRConfigurations,
138             StreamConfigurationDuration[] heicUltraHDRMinFrameDurations,
139             StreamConfigurationDuration[] heicUltraHDRStallDurations,
140             HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
141             ReprocessFormatsMap inputOutputFormatsMap,
142             boolean listHighResolution) {
143         this(configurations, minFrameDurations, stallDurations,
144                     depthConfigurations, depthMinFrameDurations, depthStallDurations,
145                     dynamicDepthConfigurations, dynamicDepthMinFrameDurations,
146                     dynamicDepthStallDurations,
147                     heicConfigurations, heicMinFrameDurations, heicStallDurations,
148                     jpegRConfigurations, jpegRMinFrameDurations, jpegRStallDurations,
149                     heicUltraHDRConfigurations, heicUltraHDRMinFrameDurations,
150                     heicUltraHDRStallDurations, highSpeedVideoConfigurations, inputOutputFormatsMap,
151                     listHighResolution, /*enforceImplementationDefined*/ true);
152     }
153 
154     /**
155      * Create a new {@link StreamConfigurationMap}.
156      *
157      * <p>The array parameters ownership is passed to this object after creation; do not
158      * write to them after this constructor is invoked.</p>
159      *
160      * @param configurations a non-{@code null} array of {@link StreamConfiguration}
161      * @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration}
162      * @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
163      * @param depthConfigurations a non-{@code null} array of depth {@link StreamConfiguration}
164      * @param depthMinFrameDurations a non-{@code null} array of depth
165      *        {@link StreamConfigurationDuration}
166      * @param depthStallDurations a non-{@code null} array of depth
167      *        {@link StreamConfigurationDuration}
168      * @param dynamicDepthConfigurations a non-{@code null} array of dynamic depth
169      *        {@link StreamConfiguration}
170      * @param dynamicDepthMinFrameDurations a non-{@code null} array of dynamic depth
171      *        {@link StreamConfigurationDuration}
172      * @param dynamicDepthStallDurations a non-{@code null} array of dynamic depth
173      *        {@link StreamConfigurationDuration}
174      * @param heicConfigurations a non-{@code null} array of heic {@link StreamConfiguration}
175      * @param heicMinFrameDurations a non-{@code null} array of heic
176      *        {@link StreamConfigurationDuration}
177      * @param heicStallDurations a non-{@code null} array of heic
178      *        {@link StreamConfigurationDuration}
179      * @param jpegRConfigurations a non-{@code null} array of Jpeg/R {@link StreamConfiguration}
180      * @param jpegRMinFrameDurations a non-{@code null} array of Jpeg/R
181      *        {@link StreamConfigurationDuration}
182      * @param jpegRStallDurations a non-{@code null} array of Jpeg/R
183      *        {@link StreamConfigurationDuration}
184      * @param heicUltraHDRConfigurations an array of Heic UltraHDR
185      *        {@link StreamConfiguration}, {@code null} if camera doesn't support the format
186      * @param heicUltraHDRMinFrameDurations an array of Heic UltraHDR
187      *        {@link StreamConfigurationDuration}, {@code null} if camera doesn't support the format
188      * @param heicUltraHDRStallDurations an array of Heic UltraHDR
189      *        {@link StreamConfigurationDuration}, {@code null} if camera doesn't support the format
190      * @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration}, null if
191      *        camera device does not support high speed video recording
192      * @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE
193      *        and thus needs a separate list of slow high-resolution output sizes
194      * @param enforceImplementationDefined a flag indicating whether
195      *        IMPLEMENTATION_DEFINED format configuration must be present
196      * @throws NullPointerException if any of the arguments except highSpeedVideoConfigurations
197      *         were {@code null} or any subelements were {@code null}
198      *
199      * @hide
200      */
StreamConfigurationMap( StreamConfiguration[] configurations, StreamConfigurationDuration[] minFrameDurations, StreamConfigurationDuration[] stallDurations, StreamConfiguration[] depthConfigurations, StreamConfigurationDuration[] depthMinFrameDurations, StreamConfigurationDuration[] depthStallDurations, StreamConfiguration[] dynamicDepthConfigurations, StreamConfigurationDuration[] dynamicDepthMinFrameDurations, StreamConfigurationDuration[] dynamicDepthStallDurations, StreamConfiguration[] heicConfigurations, StreamConfigurationDuration[] heicMinFrameDurations, StreamConfigurationDuration[] heicStallDurations, StreamConfiguration[] jpegRConfigurations, StreamConfigurationDuration[] jpegRMinFrameDurations, StreamConfigurationDuration[] jpegRStallDurations, StreamConfiguration[] heicUltraHDRConfigurations, StreamConfigurationDuration[] heicUltraHDRMinFrameDurations, StreamConfigurationDuration[] heicUltraHDRStallDurations, HighSpeedVideoConfiguration[] highSpeedVideoConfigurations, ReprocessFormatsMap inputOutputFormatsMap, boolean listHighResolution, boolean enforceImplementationDefined)201     public StreamConfigurationMap(
202             StreamConfiguration[] configurations,
203             StreamConfigurationDuration[] minFrameDurations,
204             StreamConfigurationDuration[] stallDurations,
205             StreamConfiguration[] depthConfigurations,
206             StreamConfigurationDuration[] depthMinFrameDurations,
207             StreamConfigurationDuration[] depthStallDurations,
208             StreamConfiguration[] dynamicDepthConfigurations,
209             StreamConfigurationDuration[] dynamicDepthMinFrameDurations,
210             StreamConfigurationDuration[] dynamicDepthStallDurations,
211             StreamConfiguration[] heicConfigurations,
212             StreamConfigurationDuration[] heicMinFrameDurations,
213             StreamConfigurationDuration[] heicStallDurations,
214             StreamConfiguration[] jpegRConfigurations,
215             StreamConfigurationDuration[] jpegRMinFrameDurations,
216             StreamConfigurationDuration[] jpegRStallDurations,
217             StreamConfiguration[] heicUltraHDRConfigurations,
218             StreamConfigurationDuration[] heicUltraHDRMinFrameDurations,
219             StreamConfigurationDuration[] heicUltraHDRStallDurations,
220             HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
221             ReprocessFormatsMap inputOutputFormatsMap,
222             boolean listHighResolution,
223             boolean enforceImplementationDefined) {
224 
225         if (configurations == null &&
226                 depthConfigurations == null &&
227                 heicConfigurations == null) {
228             throw new NullPointerException("At least one of color/depth/heic configurations " +
229                     "must not be null");
230         }
231 
232         if (configurations == null) {
233             // If no color configurations exist, ensure depth ones do
234             mConfigurations = new StreamConfiguration[0];
235             mMinFrameDurations = new StreamConfigurationDuration[0];
236             mStallDurations = new StreamConfigurationDuration[0];
237         } else {
238             mConfigurations = checkArrayElementsNotNull(configurations, "configurations");
239             mMinFrameDurations = checkArrayElementsNotNull(minFrameDurations, "minFrameDurations");
240             mStallDurations = checkArrayElementsNotNull(stallDurations, "stallDurations");
241         }
242 
243         mListHighResolution = listHighResolution;
244 
245         if (depthConfigurations == null) {
246             mDepthConfigurations = new StreamConfiguration[0];
247             mDepthMinFrameDurations = new StreamConfigurationDuration[0];
248             mDepthStallDurations = new StreamConfigurationDuration[0];
249         } else {
250             mDepthConfigurations = checkArrayElementsNotNull(depthConfigurations,
251                     "depthConfigurations");
252             mDepthMinFrameDurations = checkArrayElementsNotNull(depthMinFrameDurations,
253                     "depthMinFrameDurations");
254             mDepthStallDurations = checkArrayElementsNotNull(depthStallDurations,
255                     "depthStallDurations");
256         }
257 
258         if (dynamicDepthConfigurations == null) {
259             mDynamicDepthConfigurations = new StreamConfiguration[0];
260             mDynamicDepthMinFrameDurations = new StreamConfigurationDuration[0];
261             mDynamicDepthStallDurations = new StreamConfigurationDuration[0];
262         } else {
263             mDynamicDepthConfigurations = checkArrayElementsNotNull(dynamicDepthConfigurations,
264                     "dynamicDepthConfigurations");
265             mDynamicDepthMinFrameDurations = checkArrayElementsNotNull(
266                     dynamicDepthMinFrameDurations, "dynamicDepthMinFrameDurations");
267             mDynamicDepthStallDurations = checkArrayElementsNotNull(dynamicDepthStallDurations,
268                     "dynamicDepthStallDurations");
269         }
270 
271         if (heicConfigurations == null) {
272             mHeicConfigurations = new StreamConfiguration[0];
273             mHeicMinFrameDurations = new StreamConfigurationDuration[0];
274             mHeicStallDurations = new StreamConfigurationDuration[0];
275         } else {
276             mHeicConfigurations = checkArrayElementsNotNull(heicConfigurations,
277                     "heicConfigurations");
278             mHeicMinFrameDurations = checkArrayElementsNotNull(heicMinFrameDurations,
279                     "heicMinFrameDurations");
280             mHeicStallDurations = checkArrayElementsNotNull(heicStallDurations,
281                     "heicStallDurations");
282         }
283 
284         if (heicUltraHDRConfigurations == null || (!Flags.cameraHeifGainmap())) {
285             mHeicUltraHDRConfigurations = new StreamConfiguration[0];
286             mHeicUltraHDRMinFrameDurations = new StreamConfigurationDuration[0];
287             mHeicUltraHDRStallDurations = new StreamConfigurationDuration[0];
288         } else {
289             mHeicUltraHDRConfigurations = checkArrayElementsNotNull(heicUltraHDRConfigurations,
290                     "heicUltraHDRConfigurations");
291             mHeicUltraHDRMinFrameDurations = checkArrayElementsNotNull(
292                     heicUltraHDRMinFrameDurations, "heicUltraHDRMinFrameDurations");
293             mHeicUltraHDRStallDurations = checkArrayElementsNotNull(heicUltraHDRStallDurations,
294                     "heicUltraHDRStallDurations");
295         }
296 
297         if (jpegRConfigurations == null) {
298             mJpegRConfigurations = new StreamConfiguration[0];
299             mJpegRMinFrameDurations = new StreamConfigurationDuration[0];
300             mJpegRStallDurations = new StreamConfigurationDuration[0];
301         } else {
302             mJpegRConfigurations = checkArrayElementsNotNull(jpegRConfigurations,
303                     "jpegRConfigurations");
304             mJpegRMinFrameDurations = checkArrayElementsNotNull(jpegRMinFrameDurations,
305                     "jpegRFrameDurations");
306             mJpegRStallDurations = checkArrayElementsNotNull(jpegRStallDurations,
307                     "jpegRStallDurations");
308         }
309 
310         if (highSpeedVideoConfigurations == null) {
311             mHighSpeedVideoConfigurations = new HighSpeedVideoConfiguration[0];
312         } else {
313             mHighSpeedVideoConfigurations = checkArrayElementsNotNull(
314                     highSpeedVideoConfigurations, "highSpeedVideoConfigurations");
315         }
316 
317         // For each format, track how many sizes there are available to configure
318         for (StreamConfiguration config : mConfigurations) {
319             int fmt = config.getFormat();
320             SparseIntArray map = null;
321             if (config.isOutput()) {
322                 mAllOutputFormats.put(fmt, mAllOutputFormats.get(fmt) + 1);
323                 long duration = 0;
324                 if (mListHighResolution) {
325                     for (StreamConfigurationDuration configurationDuration : mMinFrameDurations) {
326                         if (configurationDuration.getFormat() == fmt &&
327                                 configurationDuration.getWidth() == config.getSize().getWidth() &&
328                                 configurationDuration.getHeight() == config.getSize().getHeight()) {
329                             duration = configurationDuration.getDuration();
330                             break;
331                         }
332                     }
333                 }
334                 map = duration <= DURATION_20FPS_NS ?
335                         mOutputFormats : mHighResOutputFormats;
336             } else {
337                 map = mInputFormats;
338             }
339             map.put(fmt, map.get(fmt) + 1);
340         }
341 
342         // For each depth format, track how many sizes there are available to configure
343         for (StreamConfiguration config : mDepthConfigurations) {
344             if (!config.isOutput()) {
345                 // Ignoring input depth configs
346                 continue;
347             }
348 
349             mDepthOutputFormats.put(config.getFormat(),
350                     mDepthOutputFormats.get(config.getFormat()) + 1);
351         }
352         for (StreamConfiguration config : mDynamicDepthConfigurations) {
353             if (!config.isOutput()) {
354                 // Ignoring input configs
355                 continue;
356             }
357 
358             mDynamicDepthOutputFormats.put(config.getFormat(),
359                     mDynamicDepthOutputFormats.get(config.getFormat()) + 1);
360         }
361 
362         // For each heic format, track how many sizes there are available to configure
363         for (StreamConfiguration config : mHeicConfigurations) {
364             if (!config.isOutput()) {
365                 // Ignoring input depth configs
366                 continue;
367             }
368 
369             mHeicOutputFormats.put(config.getFormat(),
370                     mHeicOutputFormats.get(config.getFormat()) + 1);
371         }
372 
373         if (Flags.cameraHeifGainmap()) {
374             // For each Heic UlrtaHDR format, track how many sizes there are available to configure
375             for (StreamConfiguration config : mHeicUltraHDRConfigurations) {
376                 if (!config.isOutput()) {
377                     // Ignoring input Heic UltraHDR configs
378                     continue;
379                 }
380 
381                 mHeicUltraHDROutputFormats.put(config.getFormat(),
382                         mHeicUltraHDROutputFormats.get(config.getFormat()) + 1);
383             }
384         }
385 
386         // For each Jpeg/R format, track how many sizes there are available to configure
387         for (StreamConfiguration config : mJpegRConfigurations) {
388             if (!config.isOutput()) {
389                 // Ignoring input Jpeg/R configs
390                 continue;
391             }
392 
393             mJpegROutputFormats.put(config.getFormat(),
394                     mJpegROutputFormats.get(config.getFormat()) + 1);
395         }
396 
397         if (configurations != null && enforceImplementationDefined &&
398                 mOutputFormats.indexOfKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) < 0) {
399             throw new AssertionError(
400                     "At least one stream configuration for IMPLEMENTATION_DEFINED must exist");
401         }
402 
403         // For each Size/FPS range, track how many FPS range/Size there are available
404         for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
405             Size size = config.getSize();
406             Range<Integer> fpsRange = config.getFpsRange();
407             Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
408             if (fpsRangeCount == null) {
409                 fpsRangeCount = 0;
410             }
411             mHighSpeedVideoSizeMap.put(size, fpsRangeCount + 1);
412             Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
413             if (sizeCount == null) {
414                 sizeCount = 0;
415             }
416             mHighSpeedVideoFpsRangeMap.put(fpsRange, sizeCount + 1);
417         }
418 
419         mInputOutputFormatsMap = inputOutputFormatsMap;
420     }
421 
422     /**
423      * Get the image {@code format} output formats in this stream configuration.
424      *
425      * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
426      * or in {@link PixelFormat} (and there is no possibility of collision).</p>
427      *
428      * <p>Formats listed in this array are guaranteed to return true if queried with
429      * {@link #isOutputSupportedFor(int)}.</p>
430      *
431      * @return an array of integer format
432      *
433      * @see ImageFormat
434      * @see PixelFormat
435      */
getOutputFormats()436     public int[] getOutputFormats() {
437         return getPublicFormats(/*output*/true);
438     }
439 
440     /**
441      * Get the image {@code format} output formats for a reprocessing input format.
442      *
443      * <p>When submitting a {@link CaptureRequest} with an input Surface of a given format,
444      * the only allowed target outputs of the {@link CaptureRequest} are the ones with a format
445      * listed in the return value of this method. Including any other output Surface as a target
446      * will throw an IllegalArgumentException. If no output format is supported given the input
447      * format, an empty int[] will be returned.</p>
448      *
449      * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
450      * or in {@link PixelFormat} (and there is no possibility of collision).</p>
451      *
452      * <p>Formats listed in this array are guaranteed to return true if queried with
453      * {@link #isOutputSupportedFor(int)}.</p>
454      *
455      * @return an array of integer format
456      *
457      * @see ImageFormat
458      * @see PixelFormat
459      */
getValidOutputFormatsForInput(int inputFormat)460     public int[] getValidOutputFormatsForInput(int inputFormat) {
461         if (mInputOutputFormatsMap == null) {
462             return new int[0];
463         }
464 
465         int[] outputs = mInputOutputFormatsMap.getOutputs(inputFormat);
466         if (mHeicOutputFormats.size() > 0) {
467             // All reprocessing formats map contain JPEG.
468             int[] outputsWithHeic = Arrays.copyOf(outputs, outputs.length+1);
469             outputsWithHeic[outputs.length] = ImageFormat.HEIC;
470             return outputsWithHeic;
471         } else {
472             return outputs;
473         }
474     }
475 
476     /**
477      * Get the image {@code format} input formats in this stream configuration.
478      *
479      * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
480      * or in {@link PixelFormat} (and there is no possibility of collision).</p>
481      *
482      * @return an array of integer format
483      *
484      * @see ImageFormat
485      * @see PixelFormat
486      */
getInputFormats()487     public int[] getInputFormats() {
488         return getPublicFormats(/*output*/false);
489     }
490 
491     /**
492      * Get the supported input sizes for this input format.
493      *
494      * <p>The format must have come from {@link #getInputFormats}; otherwise
495      * {@code null} is returned.</p>
496      *
497      * @param format a format from {@link #getInputFormats}
498      * @return a non-empty array of sizes, or {@code null} if the format was not available.
499      */
getInputSizes(final int format)500     public Size[] getInputSizes(final int format) {
501         return getPublicFormatSizes(format, /*output*/false, /*highRes*/false);
502     }
503 
504     /**
505      * Determine whether or not output surfaces with a particular user-defined format can be passed
506      * {@link CameraDevice#createCaptureSession(SessionConfiguration) createCaptureSession}.
507      *
508      * <p>This method determines that the output {@code format} is supported by the camera device;
509      * each output {@code surface} target may or may not itself support that {@code format}.
510      * Refer to the class which provides the surface for additional documentation.</p>
511      *
512      * <p>Formats for which this returns {@code true} are guaranteed to exist in the result
513      * returned by {@link #getOutputSizes}.</p>
514      *
515      * @param format an image format from either {@link ImageFormat} or {@link PixelFormat}
516      * @return
517      *          {@code true} iff using a {@code surface} with this {@code format} will be
518      *          supported with {@link CameraDevice#createCaptureSession(SessionConfiguration)}
519      *
520      * @throws IllegalArgumentException
521      *          if the image format was not a defined named constant
522      *          from either {@link ImageFormat} or {@link PixelFormat}
523      *
524      * @see ImageFormat
525      * @see PixelFormat
526      * @see CameraDevice#createCaptureSession(SessionConfiguration)
527      */
isOutputSupportedFor(int format)528     public boolean isOutputSupportedFor(int format) {
529         checkArgumentFormat(format);
530 
531         int internalFormat = imageFormatToInternal(format);
532         int dataspace = imageFormatToDataspace(format);
533         if (Flags.cameraHeifGainmap()) {
534             if (dataspace == DataSpace.DATASPACE_HEIF_ULTRAHDR) {
535                 return mHeicUltraHDROutputFormats.indexOfKey(internalFormat) >= 0;
536             }
537         }
538         if (dataspace == HAL_DATASPACE_DEPTH) {
539             return mDepthOutputFormats.indexOfKey(internalFormat) >= 0;
540         } else if (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) {
541             return mDynamicDepthOutputFormats.indexOfKey(internalFormat) >= 0;
542         } else if (dataspace == HAL_DATASPACE_HEIF) {
543             return mHeicOutputFormats.indexOfKey(internalFormat) >= 0;
544         } else if (dataspace == HAL_DATASPACE_JPEG_R) {
545             return mJpegROutputFormats.indexOfKey(internalFormat) >= 0;
546         } else {
547             return getFormatsMap(/*output*/true).indexOfKey(internalFormat) >= 0;
548         }
549     }
550 
551     /**
552      * Determine whether or not output streams can be configured with a particular class
553      * as a consumer.
554      *
555      * <p>The following list is generally usable for outputs:
556      * <ul>
557      * <li>{@link android.media.ImageReader} -
558      * Recommended for image processing or streaming to external resources (such as a file or
559      * network)
560      * <li>{@link android.media.MediaRecorder} -
561      * Recommended for recording video (simple to use)
562      * <li>{@link android.media.MediaCodec} -
563      * Recommended for recording video (more complicated to use, with more flexibility)
564      * <li>{@link android.view.SurfaceHolder} -
565      * Recommended for low-power camera preview with {@link android.view.SurfaceView}
566      * <li>{@link android.graphics.SurfaceTexture} -
567      * Recommended for OpenGL-accelerated preview processing or compositing with
568      * {@link android.view.TextureView}
569      * </ul>
570      * </p>
571      *
572      * <p>Generally speaking this means that creating a {@link Surface} from that class <i>may</i>
573      * provide a producer endpoint that is suitable to be used with
574      * {@link CameraDevice#createCaptureSession(SessionConfiguration)}.</p>
575      *
576      * <p>Since not all of the above classes support output of all format and size combinations,
577      * the particular combination should be queried with {@link #isOutputSupportedFor(Surface)}.</p>
578      *
579      * @param klass a non-{@code null} {@link Class} object reference
580      * @return {@code true} if this class is supported as an output, {@code false} otherwise
581      *
582      * @throws NullPointerException if {@code klass} was {@code null}
583      *
584      * @see CameraDevice#createCaptureSession(SessionConfiguration)
585      * @see #isOutputSupportedFor(Surface)
586      */
isOutputSupportedFor(Class<T> klass)587     public static <T> boolean isOutputSupportedFor(Class<T> klass) {
588         Objects.requireNonNull(klass, "klass must not be null");
589 
590         if (klass == android.media.ImageReader.class) {
591             return true;
592         } else if (klass == android.media.MediaRecorder.class) {
593             return true;
594         } else if (klass == android.media.MediaCodec.class) {
595             return true;
596         } else if (klass == android.renderscript.Allocation.class) {
597             return true;
598         } else if (klass == android.view.SurfaceHolder.class) {
599             return true;
600         } else if (klass == android.graphics.SurfaceTexture.class) {
601             return true;
602         }
603 
604         return false;
605     }
606 
607     /**
608      * Determine whether or not the {@code surface} in its current
609      * state is suitable to be included in a {@link
610      * CameraDevice#createCaptureSession(SessionConfiguration) capture
611      * session} as an output.
612      *
613      * <p>Not all surfaces are usable with the {@link CameraDevice}, and not all configurations
614      * of that {@code surface} are compatible. Some classes that provide the {@code surface} are
615      * compatible with the {@link CameraDevice} in general
616      * (see {@link #isOutputSupportedFor(Class)}, but it is the caller's responsibility to put the
617      * {@code surface} into a state that will be compatible with the {@link CameraDevice}.</p>
618      *
619      * <p>Reasons for a {@code surface} being specifically incompatible might be:
620      * <ul>
621      * <li>Using a format that's not listed by {@link #getOutputFormats}
622      * <li>Using a format/size combination that's not listed by {@link #getOutputSizes}
     * <li>The {@code surface} itself is not in a state where it can service a new producer.
     * </li>
     * </ul></p>
626      *
627      * <p>Surfaces from flexible sources will return true even if the exact size of the Surface does
628      * not match a camera-supported size, as long as the format (or class) is supported and the
     * camera device supports a size that is equal to or less than 1080p in that format. If such a
630      * Surface is used to create a capture session, it will have its size rounded to the nearest
631      * supported size, below or equal to 1080p. Flexible sources include SurfaceView, SurfaceTexture,
632      * and ImageReader.</p>
633      *
634      * <p>This is not an exhaustive list; see the particular class's documentation for further
635      * possible reasons of incompatibility.</p>
636      *
637      * @param surface a non-{@code null} {@link Surface} object reference
638      * @return {@code true} if this is supported, {@code false} otherwise
639      *
640      * @throws NullPointerException if {@code surface} was {@code null}
641      * @throws IllegalArgumentException if the Surface endpoint is no longer valid
642      *
643      * @see CameraDevice#createCaptureSession(SessionConfiguration)
644      * @see #isOutputSupportedFor(Class)
645      */
isOutputSupportedFor(Surface surface)646     public boolean isOutputSupportedFor(Surface surface) {
647         Objects.requireNonNull(surface, "surface must not be null");
648 
649         Size surfaceSize = SurfaceUtils.getSurfaceSize(surface);
650         int surfaceFormat = SurfaceUtils.getSurfaceFormat(surface);
651         int surfaceDataspace = SurfaceUtils.getSurfaceDataspace(surface);
652 
653         // See if consumer is flexible.
654         boolean isFlexible = SurfaceUtils.isFlexibleConsumer(surface);
655 
656         StreamConfiguration[] configs =
657                 surfaceDataspace == HAL_DATASPACE_DEPTH ? mDepthConfigurations :
658                 surfaceDataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthConfigurations :
659                 surfaceDataspace == HAL_DATASPACE_HEIF ? mHeicConfigurations :
660                 surfaceDataspace == HAL_DATASPACE_JPEG_R ? mJpegRConfigurations :
661                 mConfigurations;
662         if (Flags.cameraHeifGainmap()) {
663             if (surfaceDataspace == DataSpace.DATASPACE_HEIF_ULTRAHDR) {
664                     configs = mHeicUltraHDRConfigurations;
665             }
666         }
667         for (StreamConfiguration config : configs) {
668             if (config.getFormat() == surfaceFormat && config.isOutput()) {
669                 // Matching format, either need exact size match, or a flexible consumer
670                 // and a size no bigger than MAX_DIMEN_FOR_ROUNDING
671                 if (config.getSize().equals(surfaceSize)) {
672                     return true;
673                 } else if (isFlexible &&
674                         (config.getSize().getWidth() <= MAX_DIMEN_FOR_ROUNDING)) {
675                     return true;
676                 }
677             }
678         }
679         return false;
680     }
681 
682     /**
683      * Determine whether or not the particular stream configuration is
684      * suitable to be included in a {@link
685      * CameraDevice#createCaptureSession(SessionConfiguration) capture
686      * session} as an output.
687      *
688      * @param size stream configuration size
689      * @param format stream configuration format
690      * @return {@code true} if this is supported, {@code false} otherwise
691      *
692      * @see CameraDevice#createCaptureSession(SessionConfiguration)
693      * @see #isOutputSupportedFor(Class)
694      * @hide
695      */
isOutputSupportedFor(Size size, int format)696     public boolean isOutputSupportedFor(Size size, int format) {
697         int internalFormat = imageFormatToInternal(format);
698         int dataspace = imageFormatToDataspace(format);
699 
700         StreamConfiguration[] configs =
701                 dataspace == HAL_DATASPACE_DEPTH ? mDepthConfigurations :
702                 dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthConfigurations :
703                 dataspace == HAL_DATASPACE_HEIF ? mHeicConfigurations :
704                 dataspace == HAL_DATASPACE_JPEG_R ? mJpegRConfigurations :
705                 mConfigurations;
706         if (Flags.cameraHeifGainmap()) {
707             if (dataspace == DataSpace.DATASPACE_HEIF_ULTRAHDR ) {
708                 configs = mHeicUltraHDRConfigurations;
709             }
710         }
711         for (StreamConfiguration config : configs) {
712             if ((config.getFormat() == internalFormat) && config.isOutput() &&
713                     config.getSize().equals(size)) {
714                 return true;
715             }
716         }
717 
718         return false;
719     }
720 
721     /**
722      * Get a list of sizes compatible with {@code klass} to use as an output.
723      *
724      * <p>Some of the supported classes may support additional formats beyond
725      * {@link ImageFormat#PRIVATE}; this function only returns
726      * sizes for {@link ImageFormat#PRIVATE}. For example, {@link android.media.ImageReader}
727      * supports {@link ImageFormat#YUV_420_888} and {@link ImageFormat#PRIVATE}, this method will
728      * only return the sizes for {@link ImageFormat#PRIVATE} for {@link android.media.ImageReader}
729      * class.</p>
730      *
731      * <p>If a well-defined format such as {@code NV21} is required, use
732      * {@link #getOutputSizes(int)} instead.</p>
733      *
734      * <p>The {@code klass} should be a supported output, that querying
735      * {@code #isOutputSupportedFor(Class)} should return {@code true}.</p>
736      *
737      * @param klass
738      *          a non-{@code null} {@link Class} object reference
739      * @return
740      *          an array of supported sizes for {@link ImageFormat#PRIVATE} format,
741      *          or {@code null} iff the {@code klass} is not a supported output.
742      *
743      *
744      * @throws NullPointerException if {@code klass} was {@code null}
745      *
746      * @see #isOutputSupportedFor(Class)
747      */
getOutputSizes(Class<T> klass)748     public <T> Size[] getOutputSizes(Class<T> klass) {
749         if (isOutputSupportedFor(klass) == false) {
750             return null;
751         }
752 
753         return getInternalFormatSizes(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
754                 HAL_DATASPACE_UNKNOWN,/*output*/true, /*highRes*/false);
755     }
756 
757     /**
758      * Get a list of sizes compatible with the requested image {@code format}.
759      *
760      * <p>The {@code format} should be a supported format (one of the formats returned by
761      * {@link #getOutputFormats}).</p>
762      *
763      * As of API level 23, the {@link #getHighResolutionOutputSizes} method can be used on devices
764      * that support the
765      * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}
766      * capability to get a list of high-resolution output sizes that cannot operate at the preferred
767      * 20fps rate. This means that for some supported formats, this method will return an empty
768      * list, if all the supported resolutions operate at below 20fps.  For devices that do not
769      * support the BURST_CAPTURE capability, all output resolutions are listed through this method.
770      *
771      * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
772      * @return
773      *          an array of supported sizes,
774      *          or {@code null} if the {@code format} is not a supported output
775      *
776      * @see ImageFormat
777      * @see PixelFormat
778      * @see #getOutputFormats
779      */
getOutputSizes(int format)780     public Size[] getOutputSizes(int format) {
781         return getPublicFormatSizes(format, /*output*/true, /*highRes*/ false);
782     }
783 
784     /**
785      * Get a list of supported high speed video recording sizes.
786      * <p>
787      * When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is
788      * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will
789      * list the supported high speed video size configurations. All the sizes listed will be a
790      * subset of the sizes reported by {@link #getOutputSizes} for processed non-stalling formats
791      * (typically {@link ImageFormat#PRIVATE} {@link ImageFormat#YUV_420_888}, etc.)
792      * </p>
793      * <p>
     * To enable high speed video recording, application must create a constrained high speed
795      * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit
796      * a CaptureRequest list created by
797      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
798      * to this session. The application must select the video size from this method and
799      * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
800      * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and
801      * generate the high speed request list. For example, if the application intends to do high
802      * speed recording, it can select the maximum size reported by this method to create high speed
803      * capture session. Note that for the use case of multiple output streams, application must
804      * select one unique size from this method to use (e.g., preview and recording streams must have
805      * the same size). Otherwise, the high speed session creation will fail. Once the size is
806      * selected, application can get the supported FPS ranges by
807      * {@link #getHighSpeedVideoFpsRangesFor}, and use these FPS ranges to setup the recording
808      * request lists via
809      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
810      * </p>
811      *
812      * <p>This function returns an empty array if
813      * {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO}
814      * is not supported.</p>
815      *
816      * @return an array of supported high speed video recording sizes
817      * @see #getHighSpeedVideoFpsRangesFor(Size)
818      * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
819      * @see CameraDevice#createConstrainedHighSpeedCaptureSession
820      * @see android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList
821      */
getHighSpeedVideoSizes()822     public Size[] getHighSpeedVideoSizes() {
823         Set<Size> keySet = mHighSpeedVideoSizeMap.keySet();
824         return keySet.toArray(new Size[keySet.size()]);
825     }
826 
827     /**
828      * Get the frame per second ranges (fpsMin, fpsMax) for input high speed video size.
829      * <p>
830      * See {@link #getHighSpeedVideoFpsRanges} for how to enable high speed recording.
831      * </p>
832      * <p>
833      * The {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS ranges} reported in this method
834      * must not be used to setup capture requests that are submitted to unconstrained capture
835      * sessions, or it will result in {@link IllegalArgumentException IllegalArgumentExceptions}.
836      * </p>
837      * <p>
838      * See {@link #getHighSpeedVideoFpsRanges} for the characteristics of the returned FPS ranges.
839      * </p>
840      *
841      * @param size one of the sizes returned by {@link #getHighSpeedVideoSizes()}
842      * @return an array of supported high speed video recording FPS ranges The upper bound of
843      *         returned ranges is guaranteed to be greater than or equal to 120.
844      * @throws IllegalArgumentException if input size does not exist in the return value of
845      *             getHighSpeedVideoSizes
846      * @see #getHighSpeedVideoSizes()
847      * @see #getHighSpeedVideoFpsRanges()
848      */
getHighSpeedVideoFpsRangesFor(Size size)849     public Range<Integer>[] getHighSpeedVideoFpsRangesFor(Size size) {
850         Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
851         if (fpsRangeCount == null || fpsRangeCount == 0) {
852             throw new IllegalArgumentException(String.format(
853                     "Size %s does not support high speed video recording", size));
854         }
855 
856         @SuppressWarnings("unchecked")
857         Range<Integer>[] fpsRanges = new Range[fpsRangeCount];
858         int i = 0;
859         for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
860             if (size.equals(config.getSize())) {
861                 fpsRanges[i++] = config.getFpsRange();
862             }
863         }
864         return fpsRanges;
865     }
866 
867     /**
868      * Get a list of supported high speed video recording FPS ranges.
869      * <p>
870      * When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is
871      * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will
872      * list the supported high speed video FPS range configurations. Application can then use
873      * {@link #getHighSpeedVideoSizesFor} to query available sizes for one of returned FPS range.
874      * </p>
875      * <p>
     * To enable high speed video recording, application must create a constrained high speed
877      * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit
878      * a CaptureRequest list created by
879      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
880      * to this session. The application must select the video size from this method and
881      * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
882      * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and
883      * generate the high speed request list. For example, if the application intends to do high
884      * speed recording, it can select one FPS range reported by this method, query the video sizes
885      * corresponding to this FPS range by {@link #getHighSpeedVideoSizesFor} and use one of reported
886      * sizes to create a high speed capture session. Note that for the use case of multiple output
887      * streams, application must select one unique size from this method to use (e.g., preview and
888      * recording streams must have the same size). Otherwise, the high speed session creation will
889      * fail. Once the high speed capture session is created, the application can set the FPS range
890      * in the recording request lists via
891      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
892      * </p>
893      * <p>
894      * The FPS ranges reported by this method will have below characteristics:
     * <li>The fpsMin and fpsMax will be a multiple of 30fps.</li>
896      * <li>The fpsMin will be no less than 30fps, the fpsMax will be no less than 120fps.</li>
897      * <li>At least one range will be a fixed FPS range where fpsMin == fpsMax.</li>
898      * <li>For each fixed FPS range, there will be one corresponding variable FPS range
899      * [30, fps_max] or [60, fps_max]. These kinds of FPS ranges are suitable for preview-only
900      * use cases where the application doesn't want the camera device always produce higher frame
901      * rate than the display refresh rate. Both 30fps and 60fps preview rate will not be
902      * supported for the same recording rate.</li>
903      * </p>
904      *
905      * <p>This function returns an empty array if
906      * {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO}
907      * is not supported.</p>
908      *
909      * @return an array of supported high speed video recording FPS ranges The upper bound of
910      *         returned ranges is guaranteed to be larger or equal to 120.
911      * @see #getHighSpeedVideoSizesFor
912      * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
913      * @see CameraDevice#createConstrainedHighSpeedCaptureSession
914      * @see android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList
915      */
916     @SuppressWarnings("unchecked")
getHighSpeedVideoFpsRanges()917     public Range<Integer>[] getHighSpeedVideoFpsRanges() {
918         Set<Range<Integer>> keySet = mHighSpeedVideoFpsRangeMap.keySet();
919         return keySet.toArray(new Range[keySet.size()]);
920     }
921 
922     /**
923      * Get the supported video sizes for an input high speed FPS range.
924      *
925      * <p> See {@link #getHighSpeedVideoSizes} for how to enable high speed recording.</p>
926      *
927      * @param fpsRange one of the FPS range returned by {@link #getHighSpeedVideoFpsRanges()}
928      * @return An array of video sizes to create high speed capture sessions for high speed streaming
929      *         use cases.
930      *
931      * @throws IllegalArgumentException if input FPS range does not exist in the return value of
932      *         getHighSpeedVideoFpsRanges
933      * @see #getHighSpeedVideoFpsRanges()
934      */
getHighSpeedVideoSizesFor(Range<Integer> fpsRange)935     public Size[] getHighSpeedVideoSizesFor(Range<Integer> fpsRange) {
936         Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
937         if (sizeCount == null || sizeCount == 0) {
938             throw new IllegalArgumentException(String.format(
939                     "FpsRange %s does not support high speed video recording", fpsRange));
940         }
941 
942         Size[] sizes = new Size[sizeCount];
943         int i = 0;
944         for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
945             if (fpsRange.equals(config.getFpsRange())) {
946                 sizes[i++] = config.getSize();
947             }
948         }
949         return sizes;
950     }
951 
952     /**
953      * Get a list of supported high resolution sizes, which cannot operate at full BURST_CAPTURE
954      * rate.
955      *
956      * <p>This includes all output sizes that cannot meet the 20 fps frame rate requirements for the
957      * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}
958      * capability.  This does not include the stall duration, so for example, a JPEG or RAW16 output
959      * resolution with a large stall duration but a minimum frame duration that's above 20 fps will
960      * still be listed in the regular {@link #getOutputSizes} list. All the sizes on this list that
961      * are less than 24 megapixels are still guaranteed to operate at a rate of at least 10 fps,
962      * not including stall duration. Sizes on this list that are at least 24 megapixels are allowed
963      * to operate at less than 10 fps.</p>
964      *
965      * <p>For a device that does not support the BURST_CAPTURE capability, this list will be
966      * {@code null}, since resolutions in the {@link #getOutputSizes} list are already not
967      * guaranteed to meet &gt;= 20 fps rate requirements. For a device that does support the
968      * BURST_CAPTURE capability, this list may be empty, if all supported resolutions meet the 20
969      * fps requirement.</p>
970      *
971      * @return an array of supported slower high-resolution sizes, or {@code null} if the
972      *         BURST_CAPTURE capability is not supported
973      */
getHighResolutionOutputSizes(int format)974     public Size[] getHighResolutionOutputSizes(int format) {
975         if (!mListHighResolution) return null;
976 
977         return getPublicFormatSizes(format, /*output*/true, /*highRes*/ true);
978     }
979 
980     /**
981      * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
982      * for the format/size combination (in nanoseconds).
983      *
984      * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
985      * <p>{@code size} should be one of the ones returned by
986      * {@link #getOutputSizes(int)}.</p>
987      *
988      * <p>This corresponds to the minimum frame duration (maximum frame rate) possible when only
989      * that stream is configured in a session, with all processing (typically in
990      * {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.  </p>
991      *
992      * <p>When multiple streams are used in a session, the minimum frame duration will be
993      * {@code max(individual stream min durations)}.  See {@link #getOutputStallDuration} for
994      * details of timing for formats that may cause frame rate slowdown when they are targeted by a
995      * capture request.</p>
996      *
997      * <p>For devices that do not support manual sensor control
998      * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
999      * this function may return 0.</p>
1000      *
1001      * <p>The minimum frame duration of a stream (of a particular format, size) is the same
1002      * regardless of whether the stream is input or output.</p>
1003      *
1004      * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
1005      * @param size an output-compatible size
1006      * @return a minimum frame duration {@code >} 0 in nanoseconds, or
1007      *          0 if the minimum frame duration is not available.
1008      *
1009      * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
1010      * @throws NullPointerException if {@code size} was {@code null}
1011      *
1012      * @see CaptureRequest#SENSOR_FRAME_DURATION
1013      * @see #getOutputStallDuration(int, Size)
1014      * @see ImageFormat
1015      * @see PixelFormat
1016      */
getOutputMinFrameDuration(int format, Size size)1017     public long getOutputMinFrameDuration(int format, Size size) {
1018         Objects.requireNonNull(size, "size must not be null");
1019         checkArgumentFormatSupported(format, /*output*/true);
1020 
1021         return getInternalFormatDuration(imageFormatToInternal(format),
1022                 imageFormatToDataspace(format),
1023                 size,
1024                 DURATION_MIN_FRAME);
1025     }
1026 
1027     /**
1028      * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
1029      * for the class/size combination (in nanoseconds).
1030      *
1031      * <p>This assumes that the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
1032      * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.</p>
1033      *
1034      * <p>{@code klass} should be one of the ones which is supported by
1035      * {@link #isOutputSupportedFor(Class)}.</p>
1036      *
1037      * <p>{@code size} should be one of the ones returned by
1038      * {@link #getOutputSizes(int)}.</p>
1039      *
1040      * <p>This corresponds to the minimum frame duration (maximum frame rate) possible when only
1041      * that stream is configured in a session, with all processing (typically in
1042      * {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.  </p>
1043      *
1044      * <p>When multiple streams are used in a session, the minimum frame duration will be
1045      * {@code max(individual stream min durations)}.  See {@link #getOutputStallDuration} for
1046      * details of timing for formats that may cause frame rate slowdown when they are targeted by a
1047      * capture request.</p>
1048      *
1049      * <p>For devices that do not support manual sensor control
1050      * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
1051      * this function may return 0.</p>
1052      *
1053      * <p>The minimum frame duration of a stream (of a particular format, size) is the same
1054      * regardless of whether the stream is input or output.</p>
1055      *
1056      * @param klass
1057      *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
1058      *          non-empty array returned by {@link #getOutputSizes(Class)}
1059      * @param size an output-compatible size
1060      * @return a minimum frame duration {@code >} 0 in nanoseconds, or
1061      *          0 if the minimum frame duration is not available.
1062      *
1063      * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
1064      * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
1065      *
1066      * @see CaptureRequest#SENSOR_FRAME_DURATION
1067      * @see ImageFormat
1068      * @see PixelFormat
1069      */
getOutputMinFrameDuration(final Class<T> klass, final Size size)1070     public <T> long getOutputMinFrameDuration(final Class<T> klass, final Size size) {
1071         if (!isOutputSupportedFor(klass)) {
1072             throw new IllegalArgumentException("klass was not supported");
1073         }
1074 
1075         return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1076                 HAL_DATASPACE_UNKNOWN,
1077                 size, DURATION_MIN_FRAME);
1078     }
1079 
1080     /**
1081      * Get the stall duration for the format/size combination (in nanoseconds).
1082      *
1083      * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
1084      * <p>{@code size} should be one of the ones returned by
1085      * {@link #getOutputSizes(int)}.</p>
1086      *
1087      * <p>
1088      * A stall duration is how much extra time would get added to the normal minimum frame duration
1089      * for a repeating request that has streams with non-zero stall.
1090      *
1091      * <p>For example, consider JPEG captures which have the following characteristics:
1092      *
1093      * <ul>
1094      * <li>JPEG streams act like processed YUV streams in requests for which they are not included;
1095      * in requests in which they are directly referenced, they act as JPEG streams.
1096      * This is because supporting a JPEG stream requires the underlying YUV data to always be ready
1097      * for use by a JPEG encoder, but the encoder will only be used (and impact frame duration) on
1098      * requests that actually reference a JPEG stream.
1099      * <li>The JPEG processor can run concurrently to the rest of the camera pipeline, but cannot
1100      * process more than 1 capture at a time.
1101      * </ul>
1102      *
1103      * <p>In other words, using a repeating YUV request would result in a steady frame rate
1104      * (let's say it's 30 FPS). If a single JPEG request is submitted periodically,
1105      * the frame rate will stay at 30 FPS (as long as we wait for the previous JPEG to return each
1106      * time). If we try to submit a repeating YUV + JPEG request, then the frame rate will drop from
1107      * 30 FPS.</p>
1108      *
1109      * <p>In general, submitting a new request with a non-0 stall time stream will <em>not</em> cause a
1110      * frame rate drop unless there are still outstanding buffers for that stream from previous
1111      * requests.</p>
1112      *
1113      * <p>Submitting a repeating request with streams (call this {@code S}) is the same as setting
1114      * the minimum frame duration from the normal minimum frame duration corresponding to {@code S},
1115      * added with the maximum stall duration for {@code S}.</p>
1116      *
1117      * <p>If interleaving requests with and without a stall duration, a request will stall by the
1118      * maximum of the remaining times for each can-stall stream with outstanding buffers.</p>
1119      *
1120      * <p>This means that a stalling request will not have an exposure start until the stall has
1121      * completed.</p>
1122      *
1123      * <p>This should correspond to the stall duration when only that stream is active, with all
1124      * processing (typically in {@code android.*.mode}) set to {@code FAST} or {@code OFF}.
1125      * Setting any of the processing modes to {@code HIGH_QUALITY} effectively results in an
1126      * indeterminate stall duration for all streams in a request (the regular stall calculation
1127      * rules are ignored).</p>
1128      *
1129      * <p>The following formats may always have a stall duration:
1130      * <ul>
1131      * <li>{@link ImageFormat#JPEG JPEG}
1132      * <li>{@link ImageFormat#RAW_SENSOR RAW16}
1133      * <li>{@link ImageFormat#RAW_PRIVATE RAW_PRIVATE}
1134      * </ul>
1135      * </p>
1136      *
1137      * <p>The following formats will never have a stall duration:
1138      * <ul>
1139      * <li>{@link ImageFormat#YUV_420_888 YUV_420_888}
1140      * <li>{@link ImageFormat#PRIVATE PRIVATE}
1141      * </ul></p>
1142      *
     * <p>All other formats may or may not have an allowed stall duration on a
     * per-capability basis; refer to
     * {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
     * android.request.availableCapabilities} for more details.</p>
1148      *
1149      * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}
1150      * for more information about calculating the max frame rate (absent stalls).</p>
1151      *
1152      * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
1153      * @param size an output-compatible size
1154      * @return a stall duration {@code >=} 0 in nanoseconds
1155      *
1156      * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
1157      * @throws NullPointerException if {@code size} was {@code null}
1158      *
1159      * @see CaptureRequest#SENSOR_FRAME_DURATION
1160      * @see ImageFormat
1161      * @see PixelFormat
1162      */
getOutputStallDuration(int format, Size size)1163     public long getOutputStallDuration(int format, Size size) {
1164         checkArgumentFormatSupported(format, /*output*/true);
1165 
1166         return getInternalFormatDuration(imageFormatToInternal(format),
1167                 imageFormatToDataspace(format),
1168                 size,
1169                 DURATION_STALL);
1170     }
1171 
1172     /**
1173      * Get the stall duration for the class/size combination (in nanoseconds).
1174      *
1175      * <p>This assumes that the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
1176      * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.</p>
1177      *
1178      * <p>{@code klass} should be one of the ones with a non-empty array returned by
1179      * {@link #getOutputSizes(Class)}.</p>
1180      *
1181      * <p>{@code size} should be one of the ones returned by
1182      * {@link #getOutputSizes(Class)}.</p>
1183      *
1184      * <p>See {@link #getOutputStallDuration(int, Size)} for a definition of a
1185      * <em>stall duration</em>.</p>
1186      *
1187      * @param klass
1188      *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
1189      *          non-empty array returned by {@link #getOutputSizes(Class)}
1190      * @param size an output-compatible size
1191      * @return a minimum frame duration {@code >=} 0 in nanoseconds
1192      *
1193      * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
1194      * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
1195      *
1196      * @see CaptureRequest#SENSOR_FRAME_DURATION
1197      * @see ImageFormat
1198      * @see PixelFormat
1199      */
getOutputStallDuration(final Class<T> klass, final Size size)1200     public <T> long getOutputStallDuration(final Class<T> klass, final Size size) {
1201         if (!isOutputSupportedFor(klass)) {
1202             throw new IllegalArgumentException("klass was not supported");
1203         }
1204 
1205         return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1206                 HAL_DATASPACE_UNKNOWN, size, DURATION_STALL);
1207     }
1208 
1209     /**
1210      * Check if this {@link StreamConfigurationMap} is equal to another
1211      * {@link StreamConfigurationMap}.
1212      *
1213      * <p>Two vectors are only equal if and only if each of the respective elements is equal.</p>
1214      *
1215      * @return {@code true} if the objects were equal, {@code false} otherwise
1216      */
1217     @Override
equals(final Object obj)1218     public boolean equals(final Object obj) {
1219         if (obj == null) {
1220             return false;
1221         }
1222         if (this == obj) {
1223             return true;
1224         }
1225         if (obj instanceof StreamConfigurationMap) {
1226             final StreamConfigurationMap other = (StreamConfigurationMap) obj;
1227             // XX: do we care about order?
1228             return Arrays.equals(mConfigurations, other.mConfigurations) &&
1229                     Arrays.equals(mMinFrameDurations, other.mMinFrameDurations) &&
1230                     Arrays.equals(mStallDurations, other.mStallDurations) &&
1231                     Arrays.equals(mDepthConfigurations, other.mDepthConfigurations) &&
1232                     Arrays.equals(mDepthMinFrameDurations, other.mDepthMinFrameDurations) &&
1233                     Arrays.equals(mDepthStallDurations, other.mDepthStallDurations) &&
1234                     Arrays.equals(mDynamicDepthConfigurations, other.mDynamicDepthConfigurations) &&
1235                     Arrays.equals(mDynamicDepthMinFrameDurations,
1236                             other.mDynamicDepthMinFrameDurations) &&
1237                     Arrays.equals(mDynamicDepthStallDurations, other.mDynamicDepthStallDurations) &&
1238                     Arrays.equals(mHeicConfigurations, other.mHeicConfigurations) &&
1239                     Arrays.equals(mHeicMinFrameDurations, other.mHeicMinFrameDurations) &&
1240                     Arrays.equals(mHeicStallDurations, other.mHeicStallDurations) &&
1241                     Arrays.equals(mHeicUltraHDRConfigurations, other.mHeicUltraHDRConfigurations) &&
1242                     Arrays.equals(mHeicUltraHDRMinFrameDurations,
1243                             other.mHeicUltraHDRMinFrameDurations) &&
1244                     Arrays.equals(mHeicUltraHDRStallDurations, other.mHeicUltraHDRStallDurations) &&
1245                     Arrays.equals(mJpegRConfigurations, other.mJpegRConfigurations) &&
1246                     Arrays.equals(mJpegRMinFrameDurations, other.mJpegRMinFrameDurations) &&
1247                     Arrays.equals(mJpegRStallDurations, other.mJpegRStallDurations) &&
1248                     Arrays.equals(mHighSpeedVideoConfigurations,
1249                             other.mHighSpeedVideoConfigurations);
1250         }
1251         return false;
1252     }
1253 
1254     /**
1255      * {@inheritDoc}
1256      */
1257     @Override
hashCode()1258     public int hashCode() {
1259         // XX: do we care about order?
1260         return HashCodeHelpers.hashCodeGeneric(
1261                 mConfigurations, mMinFrameDurations, mStallDurations,
1262                 mDepthConfigurations, mDepthMinFrameDurations, mDepthStallDurations,
1263                 mDynamicDepthConfigurations, mDynamicDepthMinFrameDurations,
1264                 mDynamicDepthStallDurations, mHeicConfigurations,
1265                 mHeicMinFrameDurations, mHeicStallDurations,
1266                 mHeicUltraHDRConfigurations, mHeicUltraHDRMinFrameDurations,
1267                 mHeicUltraHDRStallDurations, mJpegRConfigurations, mJpegRMinFrameDurations,
1268                 mJpegRStallDurations, mHighSpeedVideoConfigurations);
1269     }
1270 
1271     // Check that the argument is supported by #getOutputFormats or #getInputFormats
checkArgumentFormatSupported(int format, boolean output)1272     private int checkArgumentFormatSupported(int format, boolean output) {
1273         checkArgumentFormat(format);
1274 
1275         int internalFormat = imageFormatToInternal(format);
1276         int internalDataspace = imageFormatToDataspace(format);
1277 
1278         if (output) {
1279             if (Flags.cameraHeifGainmap()) {
1280                 if (internalDataspace == DataSpace.DATASPACE_HEIF_ULTRAHDR) {
1281                     if (mHeicUltraHDROutputFormats.indexOfKey(internalFormat) >= 0) {
1282                         return format;
1283                     }
1284                 }
1285             }
1286             if (internalDataspace == HAL_DATASPACE_DEPTH) {
1287                 if (mDepthOutputFormats.indexOfKey(internalFormat) >= 0) {
1288                     return format;
1289                 }
1290             } else if (internalDataspace == HAL_DATASPACE_DYNAMIC_DEPTH) {
1291                 if (mDynamicDepthOutputFormats.indexOfKey(internalFormat) >= 0) {
1292                     return format;
1293                 }
1294             } else if (internalDataspace == HAL_DATASPACE_HEIF) {
1295                 if (mHeicOutputFormats.indexOfKey(internalFormat) >= 0) {
1296                     return format;
1297                 }
1298             } else if (internalDataspace == HAL_DATASPACE_JPEG_R) {
1299                 if (mJpegROutputFormats.indexOfKey(internalFormat) >= 0) {
1300                     return format;
1301                 }
1302             } else {
1303                 if (mAllOutputFormats.indexOfKey(internalFormat) >= 0) {
1304                     return format;
1305                 }
1306             }
1307         } else {
1308             if (mInputFormats.indexOfKey(internalFormat) >= 0) {
1309                 return format;
1310             }
1311         }
1312 
1313         throw new IllegalArgumentException(String.format(
1314                 "format %x is not supported by this stream configuration map", format));
1315     }
1316 
1317     /**
1318      * Ensures that the format is either user-defined or implementation defined.
1319      *
1320      * <p>If a format has a different internal representation than the public representation,
1321      * passing in the public representation here will fail.</p>
1322      *
1323      * <p>For example if trying to use {@link ImageFormat#JPEG}:
1324      * it has a different public representation than the internal representation
1325      * {@code HAL_PIXEL_FORMAT_BLOB}, this check will fail.</p>
1326      *
1327      * <p>Any invalid/undefined formats will raise an exception.</p>
1328      *
1329      * @param format image format
1330      * @return the format
1331      *
1332      * @throws IllegalArgumentException if the format was invalid
1333      */
checkArgumentFormatInternal(int format)1334     static int checkArgumentFormatInternal(int format) {
1335         switch (format) {
1336             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1337             case HAL_PIXEL_FORMAT_BLOB:
1338             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1339             case HAL_PIXEL_FORMAT_Y16:
1340                 return format;
1341             case ImageFormat.JPEG:
1342             case ImageFormat.HEIC:
1343                 throw new IllegalArgumentException(
1344                         "An unknown internal format: " + format);
1345             default:
1346                 return checkArgumentFormat(format);
1347         }
1348     }
1349 
1350     /**
1351      * Ensures that the format is publicly user-defined in either ImageFormat or PixelFormat.
1352      *
1353      * <p>If a format has a different public representation than the internal representation,
1354      * passing in the internal representation here will fail.</p>
1355      *
1356      * <p>For example if trying to use {@code HAL_PIXEL_FORMAT_BLOB}:
1357      * it has a different internal representation than the public representation
1358      * {@link ImageFormat#JPEG}, this check will fail.</p>
1359      *
1360      * <p>Any invalid/undefined formats will raise an exception, including implementation-defined.
1361      * </p>
1362      *
1363      * <p>Note that {@code @hide} and deprecated formats will not pass this check.</p>
1364      *
1365      * @param format image format
1366      * @return the format
1367      *
1368      * @throws IllegalArgumentException if the format was not user-defined
1369      */
checkArgumentFormat(int format)1370     static int checkArgumentFormat(int format) {
1371         if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) {
1372             throw new IllegalArgumentException(String.format(
1373                     "format 0x%x was not defined in either ImageFormat or PixelFormat", format));
1374         }
1375 
1376         return format;
1377     }
1378 
1379     /**
1380      * Convert an internal format compatible with {@code graphics.h} into public-visible
1381      * {@code ImageFormat}. This assumes the dataspace of the format is not HAL_DATASPACE_DEPTH.
1382      *
1383      * <p>In particular these formats are converted:
1384      * <ul>
1385      * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.JPEG</li>
1386      * </ul>
1387      * </p>
1388      *
1389      * <p>Passing in a format which has no public equivalent will fail;
1390      * as will passing in a public format which has a different internal format equivalent.
1391      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1392      *
1393      * <p>All other formats are returned as-is, no further invalid check is performed.</p>
1394      *
1395      * <p>This function is the dual of {@link #imageFormatToInternal} for dataspaces other than
1396      * HAL_DATASPACE_DEPTH.</p>
1397      *
1398      * @param format image format from {@link ImageFormat} or {@link PixelFormat}
1399      * @return the converted image formats
1400      *
1401      * @throws IllegalArgumentException
1402      *          if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
1403      *          {@link ImageFormat#JPEG}
1404      *
1405      * @see ImageFormat
1406      * @see PixelFormat
1407      * @see #checkArgumentFormat
1408      * @hide
1409      */
imageFormatToPublic(int format)1410     public static int imageFormatToPublic(int format) {
1411         switch (format) {
1412             case HAL_PIXEL_FORMAT_BLOB:
1413                 return ImageFormat.JPEG;
1414             case ImageFormat.JPEG:
1415                 throw new IllegalArgumentException(
1416                         "ImageFormat.JPEG is an unknown internal format");
1417             default:
1418                 return format;
1419         }
1420     }
1421 
1422     /**
1423      * Convert an internal format compatible with {@code graphics.h} into public-visible
1424      * {@code ImageFormat}. This assumes the dataspace of the format is HAL_DATASPACE_DEPTH.
1425      *
1426      * <p>In particular these formats are converted:
1427      * <ul>
1428      * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.DEPTH_POINT_CLOUD
1429      * <li>HAL_PIXEL_FORMAT_Y16 => ImageFormat.DEPTH16
1430      * </ul>
1431      * </p>
1432      *
1433      * <p>Passing in an implementation-defined format which has no public equivalent will fail;
1434      * as will passing in a public format which has a different internal format equivalent.
1435      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1436      *
1437      * <p>All other formats are returned as-is, no further invalid check is performed.</p>
1438      *
1439      * <p>This function is the dual of {@link #imageFormatToInternal} for formats associated with
1440      * HAL_DATASPACE_DEPTH.</p>
1441      *
1442      * @param format image format from {@link ImageFormat} or {@link PixelFormat}
1443      * @return the converted image formats
1444      *
1445      * @throws IllegalArgumentException
1446      *          if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
1447      *          {@link ImageFormat#JPEG}
1448      *
1449      * @see ImageFormat
1450      * @see PixelFormat
1451      * @see #checkArgumentFormat
1452      * @hide
1453      */
depthFormatToPublic(int format)1454     public static int depthFormatToPublic(int format) {
1455         switch (format) {
1456             case HAL_PIXEL_FORMAT_BLOB:
1457                 return ImageFormat.DEPTH_POINT_CLOUD;
1458             case HAL_PIXEL_FORMAT_Y16:
1459                 return ImageFormat.DEPTH16;
1460             case HAL_PIXEL_FORMAT_RAW16:
1461                 return ImageFormat.RAW_DEPTH;
1462             case HAL_PIXEL_FORMAT_RAW10:
1463                 return ImageFormat.RAW_DEPTH10;
1464             case ImageFormat.JPEG:
1465                 throw new IllegalArgumentException(
1466                         "ImageFormat.JPEG is an unknown internal format");
1467             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1468                 throw new IllegalArgumentException(
1469                         "IMPLEMENTATION_DEFINED must not leak to public API");
1470             default:
1471                 throw new IllegalArgumentException(
1472                         "Unknown DATASPACE_DEPTH format " + format);
1473         }
1474     }
1475 
1476     /**
1477      * Convert image formats from internal to public formats (in-place).
1478      *
1479      * @param formats an array of image formats
1480      * @return {@code formats}
1481      *
1482      * @see #imageFormatToPublic
1483      */
imageFormatToPublic(int[] formats)1484     static int[] imageFormatToPublic(int[] formats) {
1485         if (formats == null) {
1486             return null;
1487         }
1488 
1489         for (int i = 0; i < formats.length; ++i) {
1490             formats[i] = imageFormatToPublic(formats[i]);
1491         }
1492 
1493         return formats;
1494     }
1495 
1496     /**
1497      * Convert a public format compatible with {@code ImageFormat} to an internal format
1498      * from {@code graphics.h}.
1499      *
1500      * <p>In particular these formats are converted:
1501      * <ul>
1502      * <li>ImageFormat.JPEG => HAL_PIXEL_FORMAT_BLOB
1503      * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_PIXEL_FORMAT_BLOB
1504      * <li>ImageFormat.DEPTH_JPEG => HAL_PIXEL_FORMAT_BLOB
1505      * <li>ImageFormat.HEIC => HAL_PIXEL_FORMAT_BLOB
1506      * <li>ImageFormat.HEIC_ULTRAHDR => HAL_PIXEL_FORMAT_BLOB
1507      * <li>ImageFormat.JPEG_R => HAL_PIXEL_FORMAT_BLOB
1508      * <li>ImageFormat.DEPTH16 => HAL_PIXEL_FORMAT_Y16
1509      * </ul>
1510      * </p>
1511      *
1512      * <p>Passing in an internal format which has a different public format equivalent will fail.
1513      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1514      *
1515      * <p>All other formats are returned as-is, no invalid check is performed.</p>
1516      *
1517      * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
1518      *
1519      * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
1520      * @return the converted image formats
1521      *
1522      * @see ImageFormat
1523      * @see PixelFormat
1524      *
1525      * @throws IllegalArgumentException
1526      *              if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
1527      */
imageFormatToInternal(int format)1528     static int imageFormatToInternal(int format) {
1529         if (Flags.cameraHeifGainmap()) {
1530            if (format == ImageFormat.HEIC_ULTRAHDR) {
1531                return HAL_PIXEL_FORMAT_BLOB;
1532            }
1533         }
1534         switch (format) {
1535             case ImageFormat.JPEG:
1536             case ImageFormat.DEPTH_POINT_CLOUD:
1537             case ImageFormat.DEPTH_JPEG:
1538             case ImageFormat.HEIC:
1539             case ImageFormat.JPEG_R:
1540                 return HAL_PIXEL_FORMAT_BLOB;
1541             case ImageFormat.DEPTH16:
1542                 return HAL_PIXEL_FORMAT_Y16;
1543             case ImageFormat.RAW_DEPTH:
1544                 return HAL_PIXEL_FORMAT_RAW16;
1545             case ImageFormat.RAW_DEPTH10:
1546                 return HAL_PIXEL_FORMAT_RAW10;
1547             default:
1548                 return format;
1549         }
1550     }
1551 
1552     /**
1553      * Convert a public format compatible with {@code ImageFormat} to an internal dataspace
1554      * from {@code graphics.h}.
1555      *
1556      * <p>In particular these formats are converted:
1557      * <ul>
1558      * <li>ImageFormat.JPEG => HAL_DATASPACE_V0_JFIF
1559      * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_DATASPACE_DEPTH
1560      * <li>ImageFormat.DEPTH16 => HAL_DATASPACE_DEPTH
1561      * <li>ImageFormat.DEPTH_JPEG => HAL_DATASPACE_DYNAMIC_DEPTH
1562      * <li>ImageFormat.HEIC => HAL_DATASPACE_HEIF
1563      * <li>ImageFormat.HEIC_ULTRAHDR => DATASPACE_HEIF_ULTRAHDR
1564      * <li>ImageFormat.JPEG_R => HAL_DATASPACE_JPEG_R
1565      * <li>ImageFormat.YUV_420_888 => HAL_DATASPACE_JFIF
1566      * <li>ImageFormat.RAW_SENSOR => HAL_DATASPACE_ARBITRARY
1567      * <li>ImageFormat.RAW_OPAQUE => HAL_DATASPACE_ARBITRARY
1568      * <li>ImageFormat.RAW10 => HAL_DATASPACE_ARBITRARY
1569      * <li>ImageFormat.RAW12 => HAL_DATASPACE_ARBITRARY
1570      * <li>others => HAL_DATASPACE_UNKNOWN
1571      * </ul>
1572      * </p>
1573      *
1574      * <p>Passing in an implementation-defined format here will fail (it's not a public format);
1575      * as will passing in an internal format which has a different public format equivalent.
1576      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1577      *
1578      * <p>All other formats are returned as-is, no invalid check is performed.</p>
1579      *
1580      * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
1581      *
1582      * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
1583      * @return the converted image formats
1584      *
1585      * @see ImageFormat
1586      * @see PixelFormat
1587      *
1588      * @throws IllegalArgumentException
1589      *              if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
1590      */
imageFormatToDataspace(int format)1591     static int imageFormatToDataspace(int format) {
1592         if (Flags.cameraHeifGainmap()) {
1593             if (format == ImageFormat.HEIC_ULTRAHDR) {
1594                 return DataSpace.DATASPACE_HEIF_ULTRAHDR;
1595             }
1596         }
1597         switch (format) {
1598             case ImageFormat.JPEG:
1599                 return HAL_DATASPACE_V0_JFIF;
1600             case ImageFormat.DEPTH_POINT_CLOUD:
1601             case ImageFormat.DEPTH16:
1602             case ImageFormat.RAW_DEPTH:
1603             case ImageFormat.RAW_DEPTH10:
1604                 return HAL_DATASPACE_DEPTH;
1605             case ImageFormat.DEPTH_JPEG:
1606                 return HAL_DATASPACE_DYNAMIC_DEPTH;
1607             case ImageFormat.HEIC:
1608                 return HAL_DATASPACE_HEIF;
1609             case ImageFormat.JPEG_R:
1610                 return HAL_DATASPACE_JPEG_R;
1611             case ImageFormat.YUV_420_888:
1612                 return HAL_DATASPACE_JFIF;
1613             case ImageFormat.RAW_SENSOR:
1614             case ImageFormat.RAW_PRIVATE:
1615             case ImageFormat.RAW10:
1616             case ImageFormat.RAW12:
1617                 return HAL_DATASPACE_ARBITRARY;
1618             default:
1619                 return HAL_DATASPACE_UNKNOWN;
1620         }
1621     }
1622 
1623     /**
1624      * Convert image formats from public to internal formats (in-place).
1625      *
1626      * @param formats an array of image formats
1627      * @return {@code formats}
1628      *
1629      * @see #imageFormatToInternal
1630      *
1631      * @hide
1632      */
imageFormatToInternal(int[] formats)1633     public static int[] imageFormatToInternal(int[] formats) {
1634         if (formats == null) {
1635             return null;
1636         }
1637 
1638         for (int i = 0; i < formats.length; ++i) {
1639             formats[i] = imageFormatToInternal(formats[i]);
1640         }
1641 
1642         return formats;
1643     }
1644 
getPublicFormatSizes(int format, boolean output, boolean highRes)1645     private Size[] getPublicFormatSizes(int format, boolean output, boolean highRes) {
1646         try {
1647             checkArgumentFormatSupported(format, output);
1648         } catch (IllegalArgumentException e) {
1649             return null;
1650         }
1651 
1652         int internalFormat = imageFormatToInternal(format);
1653         int dataspace = imageFormatToDataspace(format);
1654 
1655         return getInternalFormatSizes(internalFormat, dataspace, output, highRes);
1656     }
1657 
    /**
     * Enumerate the sizes published for an internal format/dataspace pair.
     *
     * @param format internal (graphics.h) pixel format
     * @param dataspace internal dataspace selecting which configuration tables to consult
     * @param output {@code true} for output streams, {@code false} for input streams
     * @param highRes when listing high-resolution formats, selects the slow high-res
     *                subset ({@code true}) vs. the regular subset ({@code false})
     * @return the matching sizes, or {@code null} if the combination is unsupported
     */
    private Size[] getInternalFormatSizes(int format, int dataspace,
            boolean output, boolean highRes) {
        // All depth formats are non-high-res.
        if (dataspace == HAL_DATASPACE_DEPTH && highRes) {
            return new Size[0];
        }

        // Pick the format -> size-count map for this direction/dataspace; plain outputs
        // further split into high-res vs. regular maps.
        SparseIntArray formatsMap =
                !output ? mInputFormats :
                dataspace == HAL_DATASPACE_DEPTH ? mDepthOutputFormats :
                dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthOutputFormats :
                dataspace == HAL_DATASPACE_HEIF ? mHeicOutputFormats :
                dataspace == HAL_DATASPACE_JPEG_R ? mJpegROutputFormats :
                highRes ? mHighResOutputFormats :
                mOutputFormats;
        // HEIC UltraHDR is flag-gated and overrides the selection above.
        boolean isDataSpaceHeifUltraHDR = false;
        if (Flags.cameraHeifGainmap()) {
            if (dataspace == DataSpace.DATASPACE_HEIF_ULTRAHDR) {
                formatsMap = mHeicUltraHDROutputFormats;
                isDataSpaceHeifUltraHDR = true;
            }
        }
        // Number of sizes advertised for this format in the chosen map (0 when absent).
        int sizesCount = formatsMap.get(format);
        // Unsupported combinations return null rather than an empty array: inputs and
        // special-dataspace outputs require a hit in their own map; regular outputs
        // require the format to exist in the all-outputs map.
        if ( ((!output || (dataspace == HAL_DATASPACE_DEPTH || dataspace == HAL_DATASPACE_JPEG_R ||
                            dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ||
                            dataspace == HAL_DATASPACE_HEIF ||
                            isDataSpaceHeifUltraHDR)) && sizesCount == 0) ||
                (output && (dataspace != HAL_DATASPACE_DEPTH && dataspace != HAL_DATASPACE_JPEG_R &&
                            dataspace != HAL_DATASPACE_DYNAMIC_DEPTH &&
                            !isDataSpaceHeifUltraHDR &&
                            dataspace != HAL_DATASPACE_HEIF) &&
                 mAllOutputFormats.get(format) == 0)) {
            return null;
        }

        Size[] sizes = new Size[sizesCount];
        int sizeIndex = 0;

        // Configuration and min-frame-duration tables matching the requested dataspace.
        StreamConfiguration[] configurations =
                (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations :
                (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthConfigurations :
                (dataspace == HAL_DATASPACE_HEIF) ? mHeicConfigurations :
                (dataspace == HAL_DATASPACE_JPEG_R) ? mJpegRConfigurations :
                (isDataSpaceHeifUltraHDR) ? mHeicUltraHDRConfigurations :
                mConfigurations;
        StreamConfigurationDuration[] minFrameDurations =
                (dataspace == HAL_DATASPACE_DEPTH) ? mDepthMinFrameDurations :
                (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthMinFrameDurations :
                (dataspace == HAL_DATASPACE_HEIF) ? mHeicMinFrameDurations :
                (dataspace == HAL_DATASPACE_JPEG_R) ? mJpegRMinFrameDurations :
                (isDataSpaceHeifUltraHDR) ? mHeicUltraHDRMinFrameDurations :
                mMinFrameDurations;

        for (StreamConfiguration config : configurations) {
            int fmt = config.getFormat();
            if (fmt == format && config.isOutput() == output) {
                if (output && mListHighResolution) {
                    // Filter slow high-res output formats; include for
                    // highRes, remove for !highRes
                    long duration = 0;
                    // Find this configuration's min frame duration (0 if unlisted).
                    for (int i = 0; i < minFrameDurations.length; i++) {
                        StreamConfigurationDuration d = minFrameDurations[i];
                        if (d.getFormat() == fmt &&
                                d.getWidth() == config.getSize().getWidth() &&
                                d.getHeight() == config.getSize().getHeight()) {
                            duration = d.getDuration();
                            break;
                        }
                    }
                    // A size is "high-res" when slower than 20 FPS; depth sizes are
                    // never filtered this way.
                    if (dataspace != HAL_DATASPACE_DEPTH &&
                            highRes != (duration > DURATION_20FPS_NS)) {
                        continue;
                    }
                }
                sizes[sizeIndex++] = config.getSize();
            }
        }

        // Dynamic depth streams can have both fast and also high res modes.
        // NOTE(review): '&&' binds tighter than '||' here, so the (sizeIndex != sizesCount)
        // guard applies only to the DYNAMIC_DEPTH/HEIF term; JPEG_R and UltraHDR always
        // take this trimming path — confirm this is intended.
        if ((sizeIndex != sizesCount) && (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ||
                dataspace == HAL_DATASPACE_HEIF) || (dataspace == HAL_DATASPACE_JPEG_R) ||
                isDataSpaceHeifUltraHDR) {

            if (sizeIndex > sizesCount) {
                throw new AssertionError(
                        "Too many dynamic depth sizes (expected " + sizesCount + ", actual " +
                        sizeIndex + ")");
            }

            // Trim the array down to the entries actually filled in.
            if (sizeIndex <= 0) {
                sizes = new Size[0];
            } else {
                sizes = Arrays.copyOf(sizes, sizeIndex);
            }
        } else if (sizeIndex != sizesCount) {
            throw new AssertionError(
                    "Too few sizes (expected " + sizesCount + ", actual " + sizeIndex + ")");
        }

        return sizes;
    }
1759 
1760     /** Get the list of publicly visible output formats */
getPublicFormats(boolean output)1761     private int[] getPublicFormats(boolean output) {
1762         int[] formats = new int[getPublicFormatCount(output)];
1763 
1764         int i = 0;
1765 
1766         SparseIntArray map = getFormatsMap(output);
1767         for (int j = 0; j < map.size(); j++) {
1768             int format = map.keyAt(j);
1769             formats[i++] = imageFormatToPublic(format);
1770         }
1771         if (output) {
1772             for (int j = 0; j < mDepthOutputFormats.size(); j++) {
1773                 formats[i++] = depthFormatToPublic(mDepthOutputFormats.keyAt(j));
1774             }
1775             if (mDynamicDepthOutputFormats.size() > 0) {
1776                 // Only one publicly dynamic depth format is available.
1777                 formats[i++] = ImageFormat.DEPTH_JPEG;
1778             }
1779             if (mHeicOutputFormats.size() > 0) {
1780                 formats[i++] = ImageFormat.HEIC;
1781             }
1782             if (Flags.cameraHeifGainmap()) {
1783                 if (mHeicUltraHDROutputFormats.size() > 0) {
1784                     formats[i++] = ImageFormat.HEIC_ULTRAHDR;
1785                 }
1786             }
1787             if (mJpegROutputFormats.size() > 0) {
1788                 formats[i++] = ImageFormat.JPEG_R;
1789             }
1790         }
1791         if (formats.length != i) {
1792             throw new AssertionError("Too few formats " + i + ", expected " + formats.length);
1793         }
1794 
1795         return formats;
1796     }
1797 
1798     /** Get the format -> size count map for either output or input formats */
getFormatsMap(boolean output)1799     private SparseIntArray getFormatsMap(boolean output) {
1800         return output ? mAllOutputFormats : mInputFormats;
1801     }
1802 
getInternalFormatDuration(int format, int dataspace, Size size, int duration)1803     private long getInternalFormatDuration(int format, int dataspace, Size size, int duration) {
1804         // assume format is already checked, since its internal
1805 
1806         if (!isSupportedInternalConfiguration(format, dataspace, size)) {
1807             throw new IllegalArgumentException("size was not supported");
1808         }
1809 
1810         StreamConfigurationDuration[] durations = getDurations(duration, dataspace);
1811 
1812         for (StreamConfigurationDuration configurationDuration : durations) {
1813             if (configurationDuration.getFormat() == format &&
1814                     configurationDuration.getWidth() == size.getWidth() &&
1815                     configurationDuration.getHeight() == size.getHeight()) {
1816                 return configurationDuration.getDuration();
1817             }
1818         }
1819         // Default duration is '0' (unsupported/no extra stall)
1820         return 0;
1821     }
1822 
1823     /**
1824      * Get the durations array for the kind of duration
1825      *
1826      * @see #DURATION_MIN_FRAME
1827      * @see #DURATION_STALL
1828      * */
getDurations(int duration, int dataspace)1829     private StreamConfigurationDuration[] getDurations(int duration, int dataspace) {
1830         boolean isDataSpaceHeifUltraHDR = false;
1831         if (Flags.cameraHeifGainmap()) {
1832             if (dataspace == DataSpace.DATASPACE_HEIF_ULTRAHDR) {
1833                 isDataSpaceHeifUltraHDR = true;
1834             }
1835         }
1836         switch (duration) {
1837             case DURATION_MIN_FRAME:
1838                 return (dataspace == HAL_DATASPACE_DEPTH) ? mDepthMinFrameDurations :
1839                         (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ?
1840                         mDynamicDepthMinFrameDurations :
1841                         (dataspace == HAL_DATASPACE_HEIF) ? mHeicMinFrameDurations :
1842                         isDataSpaceHeifUltraHDR ? mHeicUltraHDRMinFrameDurations :
1843                         (dataspace == HAL_DATASPACE_JPEG_R) ? mJpegRMinFrameDurations :
1844                         mMinFrameDurations;
1845 
1846             case DURATION_STALL:
1847                 return (dataspace == HAL_DATASPACE_DEPTH) ? mDepthStallDurations :
1848                         (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthStallDurations :
1849                         (dataspace == HAL_DATASPACE_HEIF) ? mHeicStallDurations :
1850                         isDataSpaceHeifUltraHDR ? mHeicUltraHDRStallDurations :
1851                         (dataspace == HAL_DATASPACE_JPEG_R) ? mJpegRStallDurations :
1852                         mStallDurations;
1853             default:
1854                 throw new IllegalArgumentException("duration was invalid");
1855         }
1856     }
1857 
1858     /** Count the number of publicly-visible output formats */
getPublicFormatCount(boolean output)1859     private int getPublicFormatCount(boolean output) {
1860         SparseIntArray formatsMap = getFormatsMap(output);
1861         int size = formatsMap.size();
1862         if (output) {
1863             size += mDepthOutputFormats.size();
1864             size += mDynamicDepthOutputFormats.size();
1865             size += mHeicOutputFormats.size();
1866             size += mJpegROutputFormats.size();
1867             size += mHeicUltraHDROutputFormats.size();
1868         }
1869 
1870         return size;
1871     }
1872 
arrayContains(T[] array, T element)1873     private static <T> boolean arrayContains(T[] array, T element) {
1874         if (array == null) {
1875             return false;
1876         }
1877 
1878         for (T el : array) {
1879             if (Objects.equals(el, element)) {
1880                 return true;
1881             }
1882         }
1883 
1884         return false;
1885     }
1886 
isSupportedInternalConfiguration(int format, int dataspace, Size size)1887     private boolean isSupportedInternalConfiguration(int format, int dataspace, Size size) {
1888         boolean isDataSpaceHeifUltraHDR = false;
1889         if (Flags.cameraHeifGainmap()) {
1890             if (dataspace == DataSpace.DATASPACE_HEIF_ULTRAHDR) {
1891                 isDataSpaceHeifUltraHDR = true;
1892             }
1893         }
1894         StreamConfiguration[] configurations =
1895                 (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations :
1896                 (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthConfigurations :
1897                 (dataspace == HAL_DATASPACE_HEIF) ? mHeicConfigurations :
1898                 (dataspace == HAL_DATASPACE_JPEG_R) ? mJpegRConfigurations :
1899                 isDataSpaceHeifUltraHDR ? mHeicUltraHDRConfigurations :
1900                 mConfigurations;
1901 
1902         for (int i = 0; i < configurations.length; i++) {
1903             if (configurations[i].getFormat() == format &&
1904                     configurations[i].getSize().equals(size)) {
1905                 return true;
1906             }
1907         }
1908 
1909         return false;
1910     }
1911 
1912     /**
1913      * Return this {@link StreamConfigurationMap} as a string representation.
1914      *
1915      * <p>{@code "StreamConfigurationMap(Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d,
1916      * stall:%d], ... [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]), Inputs([w:%d, h:%d,
1917      * format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)]), ValidOutputFormatsForInput(
1918      * [in:%d, out:%d, ... %d], ... [in:%d, out:%d, ... %d]), HighSpeedVideoConfigurations(
1919      * [w:%d, h:%d, min_fps:%d, max_fps:%d], ... [w:%d, h:%d, min_fps:%d, max_fps:%d]))"}.</p>
1920      *
1921      * <p>{@code Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d], ...
1922      * [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d])}, where
1923      * {@code [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]} represents an output
1924      * configuration's width, height, format, minimal frame duration in nanoseconds, and stall
1925      * duration in nanoseconds.</p>
1926      *
1927      * <p>{@code Inputs([w:%d, h:%d, format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)])}, where
1928      * {@code [w:%d, h:%d, format:%s(%d)]} represents an input configuration's width, height, and
1929      * format.</p>
1930      *
1931      * <p>{@code ValidOutputFormatsForInput([in:%s(%d), out:%s(%d), ... %s(%d)],
1932      * ... [in:%s(%d), out:%s(%d), ... %s(%d)])}, where {@code [in:%s(%d), out:%s(%d), ... %s(%d)]}
1933      * represents an input format and its valid output formats.</p>
1934      *
1935      * <p>{@code HighSpeedVideoConfigurations([w:%d, h:%d, min_fps:%d, max_fps:%d],
1936      * ... [w:%d, h:%d, min_fps:%d, max_fps:%d])}, where
1937      * {@code [w:%d, h:%d, min_fps:%d, max_fps:%d]} represents a high speed video output
1938      * configuration's width, height, minimal frame rate, and maximal frame rate.</p>
1939      *
1940      * @return string representation of {@link StreamConfigurationMap}
1941      */
1942     @Override
toString()1943     public String toString() {
1944         StringBuilder sb = new StringBuilder("StreamConfiguration(");
1945         appendOutputsString(sb);
1946         sb.append(", ");
1947         appendHighResOutputsString(sb);
1948         sb.append(", ");
1949         appendInputsString(sb);
1950         sb.append(", ");
1951         appendValidOutputFormatsForInputString(sb);
1952         sb.append(", ");
1953         appendHighSpeedVideoConfigurationsString(sb);
1954         sb.append(")");
1955 
1956         return sb.toString();
1957     }
1958 
1959     /**
1960      * Size comparison method used by size comparators.
1961      *
1962      * @hide
1963      */
compareSizes(int widthA, int heightA, int widthB, int heightB)1964     public static int compareSizes(int widthA, int heightA, int widthB, int heightB) {
1965         long left = widthA * (long) heightA;
1966         long right = widthB * (long) heightB;
1967         if (left == right) {
1968             left = widthA;
1969             right = widthB;
1970         }
1971         return (left < right) ? -1 : (left > right ? 1 : 0);
1972     }
1973 
appendOutputsString(StringBuilder sb)1974     private void appendOutputsString(StringBuilder sb) {
1975         sb.append("Outputs(");
1976         int[] formats = getOutputFormats();
1977         for (int format : formats) {
1978             Size[] sizes = getOutputSizes(format);
1979             for (Size size : sizes) {
1980                 long minFrameDuration = getOutputMinFrameDuration(format, size);
1981                 long stallDuration = getOutputStallDuration(format, size);
1982                 sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
1983                         "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
1984                         format, minFrameDuration, stallDuration));
1985             }
1986         }
1987         // Remove the pending ", "
1988         if (sb.charAt(sb.length() - 1) == ' ') {
1989             sb.delete(sb.length() - 2, sb.length());
1990         }
1991         sb.append(")");
1992     }
1993 
appendHighResOutputsString(StringBuilder sb)1994     private void appendHighResOutputsString(StringBuilder sb) {
1995         sb.append("HighResolutionOutputs(");
1996         int[] formats = getOutputFormats();
1997         for (int format : formats) {
1998             Size[] sizes = getHighResolutionOutputSizes(format);
1999             if (sizes == null) continue;
2000             for (Size size : sizes) {
2001                 long minFrameDuration = getOutputMinFrameDuration(format, size);
2002                 long stallDuration = getOutputStallDuration(format, size);
2003                 sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
2004                         "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
2005                         format, minFrameDuration, stallDuration));
2006             }
2007         }
2008         // Remove the pending ", "
2009         if (sb.charAt(sb.length() - 1) == ' ') {
2010             sb.delete(sb.length() - 2, sb.length());
2011         }
2012         sb.append(")");
2013     }
2014 
appendInputsString(StringBuilder sb)2015     private void appendInputsString(StringBuilder sb) {
2016         sb.append("Inputs(");
2017         int[] formats = getInputFormats();
2018         for (int format : formats) {
2019             Size[] sizes = getInputSizes(format);
2020             for (Size size : sizes) {
2021                 sb.append(String.format("[w:%d, h:%d, format:%s(%d)], ", size.getWidth(),
2022                         size.getHeight(), formatToString(format), format));
2023             }
2024         }
2025         // Remove the pending ", "
2026         if (sb.charAt(sb.length() - 1) == ' ') {
2027             sb.delete(sb.length() - 2, sb.length());
2028         }
2029         sb.append(")");
2030     }
2031 
appendValidOutputFormatsForInputString(StringBuilder sb)2032     private void appendValidOutputFormatsForInputString(StringBuilder sb) {
2033         sb.append("ValidOutputFormatsForInput(");
2034         int[] inputFormats = getInputFormats();
2035         for (int inputFormat : inputFormats) {
2036             sb.append(String.format("[in:%s(%d), out:", formatToString(inputFormat), inputFormat));
2037             int[] outputFormats = getValidOutputFormatsForInput(inputFormat);
2038             for (int i = 0; i < outputFormats.length; i++) {
2039                 sb.append(String.format("%s(%d)", formatToString(outputFormats[i]),
2040                         outputFormats[i]));
2041                 if (i < outputFormats.length - 1) {
2042                     sb.append(", ");
2043                 }
2044             }
2045             sb.append("], ");
2046         }
2047         // Remove the pending ", "
2048         if (sb.charAt(sb.length() - 1) == ' ') {
2049             sb.delete(sb.length() - 2, sb.length());
2050         }
2051         sb.append(")");
2052     }
2053 
appendHighSpeedVideoConfigurationsString(StringBuilder sb)2054     private void appendHighSpeedVideoConfigurationsString(StringBuilder sb) {
2055         sb.append("HighSpeedVideoConfigurations(");
2056         Size[] sizes = getHighSpeedVideoSizes();
2057         for (Size size : sizes) {
2058             Range<Integer>[] ranges = getHighSpeedVideoFpsRangesFor(size);
2059             for (Range<Integer> range : ranges) {
2060                 sb.append(String.format("[w:%d, h:%d, min_fps:%d, max_fps:%d], ", size.getWidth(),
2061                         size.getHeight(), range.getLower(), range.getUpper()));
2062             }
2063         }
2064         // Remove the pending ", "
2065         if (sb.charAt(sb.length() - 1) == ' ') {
2066             sb.delete(sb.length() - 2, sb.length());
2067         }
2068         sb.append(")");
2069     }
2070 
    /**
     * Map an image/pixel format constant to a human-readable name for logging and
     * {@link #toString}; unrecognized values map to {@code "UNKNOWN"}.
     *
     * @hide
     */
    public static String formatToString(int format) {
        // HEIC_ULTRAHDR is only recognized when the gainmap feature flag is enabled.
        if (Flags.cameraHeifGainmap()) {
            if (format == ImageFormat.HEIC_ULTRAHDR) {
                return "HEIC_ULTRAHDR";
            }
        }
        switch (format) {
            case ImageFormat.YV12:
                return "YV12";
            case ImageFormat.YUV_420_888:
                return "YUV_420_888";
            case ImageFormat.NV21:
                return "NV21";
            case ImageFormat.NV16:
                return "NV16";
            case PixelFormat.RGB_565:
                return "RGB_565";
            case PixelFormat.RGBA_8888:
                return "RGBA_8888";
            case PixelFormat.RGBX_8888:
                return "RGBX_8888";
            case PixelFormat.RGB_888:
                return "RGB_888";
            case ImageFormat.JPEG:
                return "JPEG";
            case ImageFormat.YUY2:
                return "YUY2";
            case ImageFormat.Y8:
                return "Y8";
            case ImageFormat.Y16:
                return "Y16";
            case ImageFormat.RAW_SENSOR:
                return "RAW_SENSOR";
            case ImageFormat.RAW_PRIVATE:
                return "RAW_PRIVATE";
            case ImageFormat.RAW10:
                return "RAW10";
            case ImageFormat.DEPTH16:
                return "DEPTH16";
            case ImageFormat.DEPTH_POINT_CLOUD:
                return "DEPTH_POINT_CLOUD";
            case ImageFormat.DEPTH_JPEG:
                return "DEPTH_JPEG";
            case ImageFormat.RAW_DEPTH:
                return "RAW_DEPTH";
            case ImageFormat.RAW_DEPTH10:
                return "RAW_DEPTH10";
            case ImageFormat.PRIVATE:
                return "PRIVATE";
            case ImageFormat.HEIC:
                return "HEIC";
            case ImageFormat.JPEG_R:
                return "JPEG/R";
            default:
                return "UNKNOWN";
        }
    }
2131 
    // Internal pixel format constants, mirrored from system/core/include/system/graphics.h
    private static final int HAL_PIXEL_FORMAT_RAW16 = 0x20;
    /** @hide */
    public static final int HAL_PIXEL_FORMAT_BLOB = 0x21;
    private static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;
    private static final int HAL_PIXEL_FORMAT_YCbCr_420_888 = 0x23;
    private static final int HAL_PIXEL_FORMAT_RAW_OPAQUE = 0x24;
    private static final int HAL_PIXEL_FORMAT_RAW10 = 0x25;
    private static final int HAL_PIXEL_FORMAT_RAW12 = 0x26;
    private static final int HAL_PIXEL_FORMAT_Y16 = 0x20363159;


    // Bit-field shifts for composing HAL dataspace values from standard/transfer/range.
    private static final int HAL_DATASPACE_STANDARD_SHIFT = 16;
    private static final int HAL_DATASPACE_TRANSFER_SHIFT = 22;
    private static final int HAL_DATASPACE_RANGE_SHIFT = 27;

    private static final int HAL_DATASPACE_UNKNOWN = 0x0;

    /**
     * @hide
     */
    public static final int HAL_DATASPACE_ARBITRARY = 0x1;

    /** @hide */
    public static final int HAL_DATASPACE_V0_JFIF =
            (2 << HAL_DATASPACE_STANDARD_SHIFT) |
            (3 << HAL_DATASPACE_TRANSFER_SHIFT) |
            (1 << HAL_DATASPACE_RANGE_SHIFT);

    /**
     * @hide
     */
    public static final int HAL_DATASPACE_DEPTH = 0x1000;
    /**
     * @hide
     */
    public static final int HAL_DATASPACE_DYNAMIC_DEPTH = 0x1002;
    /**
     * @hide
     */
    public static final int HAL_DATASPACE_HEIF = 0x1004;
    /**
     * @hide
     */
    public static final int HAL_DATASPACE_JPEG_R = 0x1005;
    /**
     * @hide
     */
    public static final int HAL_DATASPACE_JFIF = 0x8C20000;
    // 50ms frame duration, i.e. the threshold for 20fps operation.
    private static final long DURATION_20FPS_NS = 50000000L;
    /**
     * Duration selectors passed to {@link #getDurations(int, int)}.
     *
     * @see #getDurations(int, int)
     */
    private static final int DURATION_MIN_FRAME = 0;
    private static final int DURATION_STALL = 1;

    // Raw stream configurations and durations for the default (non-depth) dataspaces.
    private final StreamConfiguration[] mConfigurations;
    private final StreamConfigurationDuration[] mMinFrameDurations;
    private final StreamConfigurationDuration[] mStallDurations;

    // Configurations/durations for HAL_DATASPACE_DEPTH.
    private final StreamConfiguration[] mDepthConfigurations;
    private final StreamConfigurationDuration[] mDepthMinFrameDurations;
    private final StreamConfigurationDuration[] mDepthStallDurations;

    // Configurations/durations for HAL_DATASPACE_DYNAMIC_DEPTH.
    private final StreamConfiguration[] mDynamicDepthConfigurations;
    private final StreamConfigurationDuration[] mDynamicDepthMinFrameDurations;
    private final StreamConfigurationDuration[] mDynamicDepthStallDurations;

    // Configurations/durations for HAL_DATASPACE_HEIF.
    private final StreamConfiguration[] mHeicConfigurations;
    private final StreamConfigurationDuration[] mHeicMinFrameDurations;
    private final StreamConfigurationDuration[] mHeicStallDurations;

    // Configurations/durations for HEIC UltraHDR (gated by Flags.cameraHeifGainmap()).
    private final StreamConfiguration[] mHeicUltraHDRConfigurations;
    private final StreamConfigurationDuration[] mHeicUltraHDRMinFrameDurations;
    private final StreamConfigurationDuration[] mHeicUltraHDRStallDurations;

    // Configurations/durations for HAL_DATASPACE_JPEG_R.
    private final StreamConfiguration[] mJpegRConfigurations;
    private final StreamConfigurationDuration[] mJpegRMinFrameDurations;
    private final StreamConfigurationDuration[] mJpegRStallDurations;

    private final HighSpeedVideoConfiguration[] mHighSpeedVideoConfigurations;
    private final ReprocessFormatsMap mInputOutputFormatsMap;

    // Whether slow high-resolution sizes are included in the public size lists.
    private final boolean mListHighResolution;

    /** internal format -> num output sizes mapping, not including slow high-res sizes, for
     * non-depth dataspaces */
    private final SparseIntArray mOutputFormats = new SparseIntArray();
    /** internal format -> num output sizes mapping for slow high-res sizes, for non-depth
     * dataspaces */
    private final SparseIntArray mHighResOutputFormats = new SparseIntArray();
    /** internal format -> num output sizes mapping for all non-depth dataspaces */
    private final SparseIntArray mAllOutputFormats = new SparseIntArray();
    /** internal format -> num input sizes mapping, for input reprocessing formats */
    private final SparseIntArray mInputFormats = new SparseIntArray();
    /** internal format -> num depth output sizes mapping, for HAL_DATASPACE_DEPTH */
    private final SparseIntArray mDepthOutputFormats = new SparseIntArray();
    /** internal format -> num dynamic depth output sizes mapping, for HAL_DATASPACE_DYNAMIC_DEPTH */
    private final SparseIntArray mDynamicDepthOutputFormats = new SparseIntArray();
    /** internal format -> num heic output sizes mapping, for HAL_DATASPACE_HEIF */
    private final SparseIntArray mHeicOutputFormats = new SparseIntArray();
    /** internal format -> num heic output sizes mapping, for DATASPACE_HEIF_GAINMAP */
    private final SparseIntArray mHeicUltraHDROutputFormats = new SparseIntArray();
    /** internal format -> num Jpeg/R output sizes mapping, for HAL_DATASPACE_JPEG_R */
    private final SparseIntArray mJpegROutputFormats = new SparseIntArray();

    /** High speed video Size -> FPS range count mapping*/
    private final HashMap</*HighSpeedVideoSize*/Size, /*Count*/Integer> mHighSpeedVideoSizeMap =
            new HashMap<Size, Integer>();
    /** High speed video FPS range -> Size count mapping*/
    private final HashMap</*HighSpeedVideoFpsRange*/Range<Integer>, /*Count*/Integer>
            mHighSpeedVideoFpsRangeMap = new HashMap<Range<Integer>, Integer>();
2245 }
2246