• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.params;
18 
19 import static com.android.internal.util.Preconditions.checkArrayElementsNotNull;
20 
21 import android.graphics.ImageFormat;
22 import android.graphics.PixelFormat;
23 import android.hardware.camera2.CameraCharacteristics;
24 import android.hardware.camera2.CameraDevice;
25 import android.hardware.camera2.CameraMetadata;
26 import android.hardware.camera2.CaptureRequest;
27 import android.hardware.camera2.utils.HashCodeHelpers;
28 import android.hardware.camera2.utils.SurfaceUtils;
29 import android.util.Range;
30 import android.util.Size;
31 import android.util.SparseIntArray;
32 import android.view.Surface;
33 
34 import java.util.Arrays;
35 import java.util.HashMap;
36 import java.util.Objects;
37 import java.util.Set;
38 
39 /**
40  * Immutable class to store the available stream
41  * {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP configurations} to set up
42  * {@link android.view.Surface Surfaces} for creating a
43  * {@link android.hardware.camera2.CameraCaptureSession capture session} with
44  * {@link android.hardware.camera2.CameraDevice#createCaptureSession(SessionConfiguration)}.
45  * <!-- TODO: link to input stream configuration -->
46  *
47  * <p>This is the authoritative list for all <!-- input/ -->output formats (and sizes respectively
48  * for that format) that are supported by a camera device.</p>
49  *
50  * <p>This also contains the minimum frame durations and stall durations for each format/size
51  * combination that can be used to calculate effective frame rate when submitting multiple captures.
52  * </p>
53  *
54  * <p>An instance of this object is available from {@link CameraCharacteristics} using
55  * the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP} key and the
56  * {@link CameraCharacteristics#get} method.</p>
57  *
58  * <pre><code>{@code
59  * CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
60  * StreamConfigurationMap configs = characteristics.get(
61  *         CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
62  * }</code></pre>
63  *
64  * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
65  * @see CameraDevice#createCaptureSession(SessionConfiguration)
66  */
67 public final class StreamConfigurationMap {
68 
    /** Log tag used by this class. */
    private static final String TAG = "StreamConfigurationMap";

    // Flexible-consumer surfaces whose width is at or below this bound may have their size
    // rounded to a nearby supported configuration (see isOutputSupportedFor(Surface)).
    private static final int MAX_DIMEN_FOR_ROUNDING = 1920; // maximum allowed width for rounding
72 
    /**
     * Create a new {@link StreamConfigurationMap}.
     *
     * <p>The array parameters ownership is passed to this object after creation; do not
     * write to them after this constructor is invoked.</p>
     *
     * @param configurations a non-{@code null} array of {@link StreamConfiguration}
     * @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration}
     * @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
     * @param depthConfigurations a non-{@code null} array of depth {@link StreamConfiguration}
     * @param depthMinFrameDurations a non-{@code null} array of depth
     *        {@link StreamConfigurationDuration}
     * @param depthStallDurations a non-{@code null} array of depth
     *        {@link StreamConfigurationDuration}
     * @param dynamicDepthConfigurations a non-{@code null} array of dynamic depth
     *        {@link StreamConfiguration}
     * @param dynamicDepthMinFrameDurations a non-{@code null} array of dynamic depth
     *        {@link StreamConfigurationDuration}
     * @param dynamicDepthStallDurations a non-{@code null} array of dynamic depth
     *        {@link StreamConfigurationDuration}
     * @param heicConfigurations a non-{@code null} array of heic {@link StreamConfiguration}
     * @param heicMinFrameDurations a non-{@code null} array of heic
     *        {@link StreamConfigurationDuration}
     * @param heicStallDurations a non-{@code null} array of heic
     *        {@link StreamConfigurationDuration}
     * @param jpegRConfigurations a non-{@code null} array of Jpeg/R {@link StreamConfiguration}
     * @param jpegRMinFrameDurations a non-{@code null} array of Jpeg/R
     *        {@link StreamConfigurationDuration}
     * @param jpegRStallDurations a non-{@code null} array of Jpeg/R
     *        {@link StreamConfigurationDuration}
     * @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration}, null if
     *        camera device does not support high speed video recording
     * @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE
     *        and thus needs a separate list of slow high-resolution output sizes
     * @throws NullPointerException if any of the arguments except highSpeedVideoConfigurations
     *         were {@code null} or any subelements were {@code null}
     *
     * @hide
     */
    public StreamConfigurationMap(
            StreamConfiguration[] configurations,
            StreamConfigurationDuration[] minFrameDurations,
            StreamConfigurationDuration[] stallDurations,
            StreamConfiguration[] depthConfigurations,
            StreamConfigurationDuration[] depthMinFrameDurations,
            StreamConfigurationDuration[] depthStallDurations,
            StreamConfiguration[] dynamicDepthConfigurations,
            StreamConfigurationDuration[] dynamicDepthMinFrameDurations,
            StreamConfigurationDuration[] dynamicDepthStallDurations,
            StreamConfiguration[] heicConfigurations,
            StreamConfigurationDuration[] heicMinFrameDurations,
            StreamConfigurationDuration[] heicStallDurations,
            StreamConfiguration[] jpegRConfigurations,
            StreamConfigurationDuration[] jpegRMinFrameDurations,
            StreamConfigurationDuration[] jpegRStallDurations,
            HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
            ReprocessFormatsMap inputOutputFormatsMap,
            boolean listHighResolution) {
        // Delegate to the full constructor; this public entry point always requires an
        // IMPLEMENTATION_DEFINED output configuration to be present.
        this(configurations, minFrameDurations, stallDurations,
                    depthConfigurations, depthMinFrameDurations, depthStallDurations,
                    dynamicDepthConfigurations, dynamicDepthMinFrameDurations,
                    dynamicDepthStallDurations,
                    heicConfigurations, heicMinFrameDurations, heicStallDurations,
                    jpegRConfigurations, jpegRMinFrameDurations, jpegRStallDurations,
                    highSpeedVideoConfigurations, inputOutputFormatsMap, listHighResolution,
                    /*enforceImplementationDefined*/ true);
    }
140 
141     /**
142      * Create a new {@link StreamConfigurationMap}.
143      *
144      * <p>The array parameters ownership is passed to this object after creation; do not
145      * write to them after this constructor is invoked.</p>
146      *
147      * @param configurations a non-{@code null} array of {@link StreamConfiguration}
148      * @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration}
149      * @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
150      * @param depthConfigurations a non-{@code null} array of depth {@link StreamConfiguration}
151      * @param depthMinFrameDurations a non-{@code null} array of depth
152      *        {@link StreamConfigurationDuration}
153      * @param depthStallDurations a non-{@code null} array of depth
154      *        {@link StreamConfigurationDuration}
155      * @param dynamicDepthConfigurations a non-{@code null} array of dynamic depth
156      *        {@link StreamConfiguration}
157      * @param dynamicDepthMinFrameDurations a non-{@code null} array of dynamic depth
158      *        {@link StreamConfigurationDuration}
159      * @param dynamicDepthStallDurations a non-{@code null} array of dynamic depth
160      *        {@link StreamConfigurationDuration}
161      * @param heicConfigurations a non-{@code null} array of heic {@link StreamConfiguration}
162      * @param heicMinFrameDurations a non-{@code null} array of heic
163      *        {@link StreamConfigurationDuration}
164      * @param heicStallDurations a non-{@code null} array of heic
165      *        {@link StreamConfigurationDuration}
166      * @param jpegRConfigurations a non-{@code null} array of Jpeg/R {@link StreamConfiguration}
167      * @param jpegRMinFrameDurations a non-{@code null} array of Jpeg/R
168      *        {@link StreamConfigurationDuration}
169      * @param jpegRStallDurations a non-{@code null} array of Jpeg/R
170      *        {@link StreamConfigurationDuration}
171      * @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration}, null if
172      *        camera device does not support high speed video recording
173      * @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE
174      *        and thus needs a separate list of slow high-resolution output sizes
175      * @param enforceImplementationDefined a flag indicating whether
176      *        IMPLEMENTATION_DEFINED format configuration must be present
177      * @throws NullPointerException if any of the arguments except highSpeedVideoConfigurations
178      *         were {@code null} or any subelements were {@code null}
179      *
180      * @hide
181      */
StreamConfigurationMap( StreamConfiguration[] configurations, StreamConfigurationDuration[] minFrameDurations, StreamConfigurationDuration[] stallDurations, StreamConfiguration[] depthConfigurations, StreamConfigurationDuration[] depthMinFrameDurations, StreamConfigurationDuration[] depthStallDurations, StreamConfiguration[] dynamicDepthConfigurations, StreamConfigurationDuration[] dynamicDepthMinFrameDurations, StreamConfigurationDuration[] dynamicDepthStallDurations, StreamConfiguration[] heicConfigurations, StreamConfigurationDuration[] heicMinFrameDurations, StreamConfigurationDuration[] heicStallDurations, StreamConfiguration[] jpegRConfigurations, StreamConfigurationDuration[] jpegRMinFrameDurations, StreamConfigurationDuration[] jpegRStallDurations, HighSpeedVideoConfiguration[] highSpeedVideoConfigurations, ReprocessFormatsMap inputOutputFormatsMap, boolean listHighResolution, boolean enforceImplementationDefined)182     public StreamConfigurationMap(
183             StreamConfiguration[] configurations,
184             StreamConfigurationDuration[] minFrameDurations,
185             StreamConfigurationDuration[] stallDurations,
186             StreamConfiguration[] depthConfigurations,
187             StreamConfigurationDuration[] depthMinFrameDurations,
188             StreamConfigurationDuration[] depthStallDurations,
189             StreamConfiguration[] dynamicDepthConfigurations,
190             StreamConfigurationDuration[] dynamicDepthMinFrameDurations,
191             StreamConfigurationDuration[] dynamicDepthStallDurations,
192             StreamConfiguration[] heicConfigurations,
193             StreamConfigurationDuration[] heicMinFrameDurations,
194             StreamConfigurationDuration[] heicStallDurations,
195             StreamConfiguration[] jpegRConfigurations,
196             StreamConfigurationDuration[] jpegRMinFrameDurations,
197             StreamConfigurationDuration[] jpegRStallDurations,
198             HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
199             ReprocessFormatsMap inputOutputFormatsMap,
200             boolean listHighResolution,
201             boolean enforceImplementationDefined) {
202 
203         if (configurations == null &&
204                 depthConfigurations == null &&
205                 heicConfigurations == null) {
206             throw new NullPointerException("At least one of color/depth/heic configurations " +
207                     "must not be null");
208         }
209 
210         if (configurations == null) {
211             // If no color configurations exist, ensure depth ones do
212             mConfigurations = new StreamConfiguration[0];
213             mMinFrameDurations = new StreamConfigurationDuration[0];
214             mStallDurations = new StreamConfigurationDuration[0];
215         } else {
216             mConfigurations = checkArrayElementsNotNull(configurations, "configurations");
217             mMinFrameDurations = checkArrayElementsNotNull(minFrameDurations, "minFrameDurations");
218             mStallDurations = checkArrayElementsNotNull(stallDurations, "stallDurations");
219         }
220 
221         mListHighResolution = listHighResolution;
222 
223         if (depthConfigurations == null) {
224             mDepthConfigurations = new StreamConfiguration[0];
225             mDepthMinFrameDurations = new StreamConfigurationDuration[0];
226             mDepthStallDurations = new StreamConfigurationDuration[0];
227         } else {
228             mDepthConfigurations = checkArrayElementsNotNull(depthConfigurations,
229                     "depthConfigurations");
230             mDepthMinFrameDurations = checkArrayElementsNotNull(depthMinFrameDurations,
231                     "depthMinFrameDurations");
232             mDepthStallDurations = checkArrayElementsNotNull(depthStallDurations,
233                     "depthStallDurations");
234         }
235 
236         if (dynamicDepthConfigurations == null) {
237             mDynamicDepthConfigurations = new StreamConfiguration[0];
238             mDynamicDepthMinFrameDurations = new StreamConfigurationDuration[0];
239             mDynamicDepthStallDurations = new StreamConfigurationDuration[0];
240         } else {
241             mDynamicDepthConfigurations = checkArrayElementsNotNull(dynamicDepthConfigurations,
242                     "dynamicDepthConfigurations");
243             mDynamicDepthMinFrameDurations = checkArrayElementsNotNull(
244                     dynamicDepthMinFrameDurations, "dynamicDepthMinFrameDurations");
245             mDynamicDepthStallDurations = checkArrayElementsNotNull(dynamicDepthStallDurations,
246                     "dynamicDepthStallDurations");
247         }
248 
249         if (heicConfigurations == null) {
250             mHeicConfigurations = new StreamConfiguration[0];
251             mHeicMinFrameDurations = new StreamConfigurationDuration[0];
252             mHeicStallDurations = new StreamConfigurationDuration[0];
253         } else {
254             mHeicConfigurations = checkArrayElementsNotNull(heicConfigurations,
255                     "heicConfigurations");
256             mHeicMinFrameDurations = checkArrayElementsNotNull(heicMinFrameDurations,
257                     "heicMinFrameDurations");
258             mHeicStallDurations = checkArrayElementsNotNull(heicStallDurations,
259                     "heicStallDurations");
260         }
261 
262 
263         if (jpegRConfigurations == null) {
264             mJpegRConfigurations = new StreamConfiguration[0];
265             mJpegRMinFrameDurations = new StreamConfigurationDuration[0];
266             mJpegRStallDurations = new StreamConfigurationDuration[0];
267         } else {
268             mJpegRConfigurations = checkArrayElementsNotNull(jpegRConfigurations,
269                     "jpegRConfigurations");
270             mJpegRMinFrameDurations = checkArrayElementsNotNull(jpegRMinFrameDurations,
271                     "jpegRFrameDurations");
272             mJpegRStallDurations = checkArrayElementsNotNull(jpegRStallDurations,
273                     "jpegRStallDurations");
274         }
275 
276         if (highSpeedVideoConfigurations == null) {
277             mHighSpeedVideoConfigurations = new HighSpeedVideoConfiguration[0];
278         } else {
279             mHighSpeedVideoConfigurations = checkArrayElementsNotNull(
280                     highSpeedVideoConfigurations, "highSpeedVideoConfigurations");
281         }
282 
283         // For each format, track how many sizes there are available to configure
284         for (StreamConfiguration config : mConfigurations) {
285             int fmt = config.getFormat();
286             SparseIntArray map = null;
287             if (config.isOutput()) {
288                 mAllOutputFormats.put(fmt, mAllOutputFormats.get(fmt) + 1);
289                 long duration = 0;
290                 if (mListHighResolution) {
291                     for (StreamConfigurationDuration configurationDuration : mMinFrameDurations) {
292                         if (configurationDuration.getFormat() == fmt &&
293                                 configurationDuration.getWidth() == config.getSize().getWidth() &&
294                                 configurationDuration.getHeight() == config.getSize().getHeight()) {
295                             duration = configurationDuration.getDuration();
296                             break;
297                         }
298                     }
299                 }
300                 map = duration <= DURATION_20FPS_NS ?
301                         mOutputFormats : mHighResOutputFormats;
302             } else {
303                 map = mInputFormats;
304             }
305             map.put(fmt, map.get(fmt) + 1);
306         }
307 
308         // For each depth format, track how many sizes there are available to configure
309         for (StreamConfiguration config : mDepthConfigurations) {
310             if (!config.isOutput()) {
311                 // Ignoring input depth configs
312                 continue;
313             }
314 
315             mDepthOutputFormats.put(config.getFormat(),
316                     mDepthOutputFormats.get(config.getFormat()) + 1);
317         }
318         for (StreamConfiguration config : mDynamicDepthConfigurations) {
319             if (!config.isOutput()) {
320                 // Ignoring input configs
321                 continue;
322             }
323 
324             mDynamicDepthOutputFormats.put(config.getFormat(),
325                     mDynamicDepthOutputFormats.get(config.getFormat()) + 1);
326         }
327 
328         // For each heic format, track how many sizes there are available to configure
329         for (StreamConfiguration config : mHeicConfigurations) {
330             if (!config.isOutput()) {
331                 // Ignoring input depth configs
332                 continue;
333             }
334 
335             mHeicOutputFormats.put(config.getFormat(),
336                     mHeicOutputFormats.get(config.getFormat()) + 1);
337         }
338 
339         // For each Jpeg/R format, track how many sizes there are available to configure
340         for (StreamConfiguration config : mJpegRConfigurations) {
341             if (!config.isOutput()) {
342                 // Ignoring input Jpeg/R configs
343                 continue;
344             }
345 
346             mJpegROutputFormats.put(config.getFormat(),
347                     mJpegROutputFormats.get(config.getFormat()) + 1);
348         }
349 
350         if (configurations != null && enforceImplementationDefined &&
351                 mOutputFormats.indexOfKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) < 0) {
352             throw new AssertionError(
353                     "At least one stream configuration for IMPLEMENTATION_DEFINED must exist");
354         }
355 
356         // For each Size/FPS range, track how many FPS range/Size there are available
357         for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
358             Size size = config.getSize();
359             Range<Integer> fpsRange = config.getFpsRange();
360             Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
361             if (fpsRangeCount == null) {
362                 fpsRangeCount = 0;
363             }
364             mHighSpeedVideoSizeMap.put(size, fpsRangeCount + 1);
365             Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
366             if (sizeCount == null) {
367                 sizeCount = 0;
368             }
369             mHighSpeedVideoFpsRangeMap.put(fpsRange, sizeCount + 1);
370         }
371 
372         mInputOutputFormatsMap = inputOutputFormatsMap;
373     }
374 
    /**
     * Get the image {@code format} output formats in this stream configuration.
     *
     * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
     * or in {@link PixelFormat} (and there is no possibility of collision).</p>
     *
     * <p>Formats listed in this array are guaranteed to return true if queried with
     * {@link #isOutputSupportedFor(int)}.</p>
     *
     * @return an array of integer format
     *
     * @see ImageFormat
     * @see PixelFormat
     */
    public int[] getOutputFormats() {
        // Delegates to the shared public-format enumeration in output mode.
        return getPublicFormats(/*output*/true);
    }
392 
393     /**
394      * Get the image {@code format} output formats for a reprocessing input format.
395      *
396      * <p>When submitting a {@link CaptureRequest} with an input Surface of a given format,
397      * the only allowed target outputs of the {@link CaptureRequest} are the ones with a format
398      * listed in the return value of this method. Including any other output Surface as a target
399      * will throw an IllegalArgumentException. If no output format is supported given the input
400      * format, an empty int[] will be returned.</p>
401      *
402      * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
403      * or in {@link PixelFormat} (and there is no possibility of collision).</p>
404      *
405      * <p>Formats listed in this array are guaranteed to return true if queried with
406      * {@link #isOutputSupportedFor(int)}.</p>
407      *
408      * @return an array of integer format
409      *
410      * @see ImageFormat
411      * @see PixelFormat
412      */
getValidOutputFormatsForInput(int inputFormat)413     public int[] getValidOutputFormatsForInput(int inputFormat) {
414         if (mInputOutputFormatsMap == null) {
415             return new int[0];
416         }
417 
418         int[] outputs = mInputOutputFormatsMap.getOutputs(inputFormat);
419         if (mHeicOutputFormats.size() > 0) {
420             // All reprocessing formats map contain JPEG.
421             int[] outputsWithHeic = Arrays.copyOf(outputs, outputs.length+1);
422             outputsWithHeic[outputs.length] = ImageFormat.HEIC;
423             return outputsWithHeic;
424         } else {
425             return outputs;
426         }
427     }
428 
    /**
     * Get the image {@code format} input formats in this stream configuration.
     *
     * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
     * or in {@link PixelFormat} (and there is no possibility of collision).</p>
     *
     * @return an array of integer format
     *
     * @see ImageFormat
     * @see PixelFormat
     */
    public int[] getInputFormats() {
        // Delegates to the shared public-format enumeration in input mode.
        return getPublicFormats(/*output*/false);
    }
443 
    /**
     * Get the supported input sizes for this input format.
     *
     * <p>The format must have come from {@link #getInputFormats}; otherwise
     * {@code null} is returned.</p>
     *
     * @param format a format from {@link #getInputFormats}
     * @return a non-empty array of sizes, or {@code null} if the format was not available.
     */
    public Size[] getInputSizes(final int format) {
        // Input sizes never have a separate high-resolution list.
        return getPublicFormatSizes(format, /*output*/false, /*highRes*/false);
    }
456 
457     /**
458      * Determine whether or not output surfaces with a particular user-defined format can be passed
459      * {@link CameraDevice#createCaptureSession(SessionConfiguration) createCaptureSession}.
460      *
461      * <p>This method determines that the output {@code format} is supported by the camera device;
462      * each output {@code surface} target may or may not itself support that {@code format}.
463      * Refer to the class which provides the surface for additional documentation.</p>
464      *
465      * <p>Formats for which this returns {@code true} are guaranteed to exist in the result
466      * returned by {@link #getOutputSizes}.</p>
467      *
468      * @param format an image format from either {@link ImageFormat} or {@link PixelFormat}
469      * @return
470      *          {@code true} iff using a {@code surface} with this {@code format} will be
471      *          supported with {@link CameraDevice#createCaptureSession(SessionConfiguration)}
472      *
473      * @throws IllegalArgumentException
474      *          if the image format was not a defined named constant
475      *          from either {@link ImageFormat} or {@link PixelFormat}
476      *
477      * @see ImageFormat
478      * @see PixelFormat
479      * @see CameraDevice#createCaptureSession(SessionConfiguration)
480      */
isOutputSupportedFor(int format)481     public boolean isOutputSupportedFor(int format) {
482         checkArgumentFormat(format);
483 
484         int internalFormat = imageFormatToInternal(format);
485         int dataspace = imageFormatToDataspace(format);
486         if (dataspace == HAL_DATASPACE_DEPTH) {
487             return mDepthOutputFormats.indexOfKey(internalFormat) >= 0;
488         } else if (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) {
489             return mDynamicDepthOutputFormats.indexOfKey(internalFormat) >= 0;
490         } else if (dataspace == HAL_DATASPACE_HEIF) {
491             return mHeicOutputFormats.indexOfKey(internalFormat) >= 0;
492         } else if (dataspace == HAL_DATASPACE_JPEG_R) {
493             return mJpegROutputFormats.indexOfKey(internalFormat) >= 0;
494         } else {
495             return getFormatsMap(/*output*/true).indexOfKey(internalFormat) >= 0;
496         }
497     }
498 
499     /**
500      * Determine whether or not output streams can be configured with a particular class
501      * as a consumer.
502      *
503      * <p>The following list is generally usable for outputs:
504      * <ul>
505      * <li>{@link android.media.ImageReader} -
506      * Recommended for image processing or streaming to external resources (such as a file or
507      * network)
508      * <li>{@link android.media.MediaRecorder} -
509      * Recommended for recording video (simple to use)
510      * <li>{@link android.media.MediaCodec} -
511      * Recommended for recording video (more complicated to use, with more flexibility)
512      * <li>{@link android.renderscript.Allocation} -
513      * Recommended for image processing with {@link android.renderscript RenderScript}
514      * <li>{@link android.view.SurfaceHolder} -
515      * Recommended for low-power camera preview with {@link android.view.SurfaceView}
516      * <li>{@link android.graphics.SurfaceTexture} -
517      * Recommended for OpenGL-accelerated preview processing or compositing with
518      * {@link android.view.TextureView}
519      * </ul>
520      * </p>
521      *
522      * <p>Generally speaking this means that creating a {@link Surface} from that class <i>may</i>
523      * provide a producer endpoint that is suitable to be used with
524      * {@link CameraDevice#createCaptureSession(SessionConfiguration)}.</p>
525      *
526      * <p>Since not all of the above classes support output of all format and size combinations,
527      * the particular combination should be queried with {@link #isOutputSupportedFor(Surface)}.</p>
528      *
529      * @param klass a non-{@code null} {@link Class} object reference
530      * @return {@code true} if this class is supported as an output, {@code false} otherwise
531      *
532      * @throws NullPointerException if {@code klass} was {@code null}
533      *
534      * @see CameraDevice#createCaptureSession(SessionConfiguration)
535      * @see #isOutputSupportedFor(Surface)
536      */
isOutputSupportedFor(Class<T> klass)537     public static <T> boolean isOutputSupportedFor(Class<T> klass) {
538         Objects.requireNonNull(klass, "klass must not be null");
539 
540         if (klass == android.media.ImageReader.class) {
541             return true;
542         } else if (klass == android.media.MediaRecorder.class) {
543             return true;
544         } else if (klass == android.media.MediaCodec.class) {
545             return true;
546         } else if (klass == android.renderscript.Allocation.class) {
547             return true;
548         } else if (klass == android.view.SurfaceHolder.class) {
549             return true;
550         } else if (klass == android.graphics.SurfaceTexture.class) {
551             return true;
552         }
553 
554         return false;
555     }
556 
557     /**
558      * Determine whether or not the {@code surface} in its current
559      * state is suitable to be included in a {@link
560      * CameraDevice#createCaptureSession(SessionConfiguration) capture
561      * session} as an output.
562      *
563      * <p>Not all surfaces are usable with the {@link CameraDevice}, and not all configurations
564      * of that {@code surface} are compatible. Some classes that provide the {@code surface} are
565      * compatible with the {@link CameraDevice} in general
 566      * (see {@link #isOutputSupportedFor(Class)}), but it is the caller's responsibility to put the
567      * {@code surface} into a state that will be compatible with the {@link CameraDevice}.</p>
568      *
569      * <p>Reasons for a {@code surface} being specifically incompatible might be:
570      * <ul>
571      * <li>Using a format that's not listed by {@link #getOutputFormats}
572      * <li>Using a format/size combination that's not listed by {@link #getOutputSizes}
 573      * <li>The {@code surface} itself is not in a state where it can service a new producer.
574      * </li>
575      * </ul>
576      *
577      * <p>Surfaces from flexible sources will return true even if the exact size of the Surface does
578      * not match a camera-supported size, as long as the format (or class) is supported and the
579      * camera device supports a size that is equal to or less than 1080p in that format. If such as
580      * Surface is used to create a capture session, it will have its size rounded to the nearest
581      * supported size, below or equal to 1080p. Flexible sources include SurfaceView, SurfaceTexture,
582      * and ImageReader.</p>
583      *
584      * <p>This is not an exhaustive list; see the particular class's documentation for further
585      * possible reasons of incompatibility.</p>
586      *
587      * @param surface a non-{@code null} {@link Surface} object reference
588      * @return {@code true} if this is supported, {@code false} otherwise
589      *
590      * @throws NullPointerException if {@code surface} was {@code null}
591      * @throws IllegalArgumentException if the Surface endpoint is no longer valid
592      *
593      * @see CameraDevice#createCaptureSession(SessionConfiguration)
594      * @see #isOutputSupportedFor(Class)
595      */
isOutputSupportedFor(Surface surface)596     public boolean isOutputSupportedFor(Surface surface) {
597         Objects.requireNonNull(surface, "surface must not be null");
598 
599         Size surfaceSize = SurfaceUtils.getSurfaceSize(surface);
600         int surfaceFormat = SurfaceUtils.getSurfaceFormat(surface);
601         int surfaceDataspace = SurfaceUtils.getSurfaceDataspace(surface);
602 
603         // See if consumer is flexible.
604         boolean isFlexible = SurfaceUtils.isFlexibleConsumer(surface);
605 
606         StreamConfiguration[] configs =
607                 surfaceDataspace == HAL_DATASPACE_DEPTH ? mDepthConfigurations :
608                 surfaceDataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthConfigurations :
609                 surfaceDataspace == HAL_DATASPACE_HEIF ? mHeicConfigurations :
610                 surfaceDataspace == HAL_DATASPACE_JPEG_R ? mJpegRConfigurations :
611                 mConfigurations;
612         for (StreamConfiguration config : configs) {
613             if (config.getFormat() == surfaceFormat && config.isOutput()) {
614                 // Matching format, either need exact size match, or a flexible consumer
615                 // and a size no bigger than MAX_DIMEN_FOR_ROUNDING
616                 if (config.getSize().equals(surfaceSize)) {
617                     return true;
618                 } else if (isFlexible &&
619                         (config.getSize().getWidth() <= MAX_DIMEN_FOR_ROUNDING)) {
620                     return true;
621                 }
622             }
623         }
624         return false;
625     }
626 
627     /**
628      * Determine whether or not the particular stream configuration is
629      * suitable to be included in a {@link
630      * CameraDevice#createCaptureSession(SessionConfiguration) capture
631      * session} as an output.
632      *
633      * @param size stream configuration size
634      * @param format stream configuration format
635      * @return {@code true} if this is supported, {@code false} otherwise
636      *
637      * @see CameraDevice#createCaptureSession(SessionConfiguration)
638      * @see #isOutputSupportedFor(Class)
639      * @hide
640      */
isOutputSupportedFor(Size size, int format)641     public boolean isOutputSupportedFor(Size size, int format) {
642         int internalFormat = imageFormatToInternal(format);
643         int dataspace = imageFormatToDataspace(format);
644 
645         StreamConfiguration[] configs =
646                 dataspace == HAL_DATASPACE_DEPTH ? mDepthConfigurations :
647                 dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthConfigurations :
648                 dataspace == HAL_DATASPACE_HEIF ? mHeicConfigurations :
649                 dataspace == HAL_DATASPACE_JPEG_R ? mJpegRConfigurations :
650                 mConfigurations;
651         for (StreamConfiguration config : configs) {
652             if ((config.getFormat() == internalFormat) && config.isOutput() &&
653                     config.getSize().equals(size)) {
654                 return true;
655             }
656         }
657 
658         return false;
659     }
660 
661     /**
662      * Get a list of sizes compatible with {@code klass} to use as an output.
663      *
664      * <p>Some of the supported classes may support additional formats beyond
665      * {@link ImageFormat#PRIVATE}; this function only returns
666      * sizes for {@link ImageFormat#PRIVATE}. For example, {@link android.media.ImageReader}
667      * supports {@link ImageFormat#YUV_420_888} and {@link ImageFormat#PRIVATE}, this method will
668      * only return the sizes for {@link ImageFormat#PRIVATE} for {@link android.media.ImageReader}
669      * class.</p>
670      *
671      * <p>If a well-defined format such as {@code NV21} is required, use
672      * {@link #getOutputSizes(int)} instead.</p>
673      *
674      * <p>The {@code klass} should be a supported output, that querying
675      * {@code #isOutputSupportedFor(Class)} should return {@code true}.</p>
676      *
677      * @param klass
678      *          a non-{@code null} {@link Class} object reference
679      * @return
680      *          an array of supported sizes for {@link ImageFormat#PRIVATE} format,
681      *          or {@code null} iff the {@code klass} is not a supported output.
682      *
683      *
684      * @throws NullPointerException if {@code klass} was {@code null}
685      *
686      * @see #isOutputSupportedFor(Class)
687      */
getOutputSizes(Class<T> klass)688     public <T> Size[] getOutputSizes(Class<T> klass) {
689         if (isOutputSupportedFor(klass) == false) {
690             return null;
691         }
692 
693         return getInternalFormatSizes(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
694                 HAL_DATASPACE_UNKNOWN,/*output*/true, /*highRes*/false);
695     }
696 
697     /**
698      * Get a list of sizes compatible with the requested image {@code format}.
699      *
700      * <p>The {@code format} should be a supported format (one of the formats returned by
701      * {@link #getOutputFormats}).</p>
702      *
703      * As of API level 23, the {@link #getHighResolutionOutputSizes} method can be used on devices
704      * that support the
705      * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}
706      * capability to get a list of high-resolution output sizes that cannot operate at the preferred
707      * 20fps rate. This means that for some supported formats, this method will return an empty
708      * list, if all the supported resolutions operate at below 20fps.  For devices that do not
709      * support the BURST_CAPTURE capability, all output resolutions are listed through this method.
710      *
711      * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
712      * @return
713      *          an array of supported sizes,
714      *          or {@code null} if the {@code format} is not a supported output
715      *
716      * @see ImageFormat
717      * @see PixelFormat
718      * @see #getOutputFormats
719      */
getOutputSizes(int format)720     public Size[] getOutputSizes(int format) {
721         return getPublicFormatSizes(format, /*output*/true, /*highRes*/ false);
722     }
723 
724     /**
725      * Get a list of supported high speed video recording sizes.
726      * <p>
727      * When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is
728      * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will
729      * list the supported high speed video size configurations. All the sizes listed will be a
730      * subset of the sizes reported by {@link #getOutputSizes} for processed non-stalling formats
731      * (typically {@link ImageFormat#PRIVATE} {@link ImageFormat#YUV_420_888}, etc.)
732      * </p>
733      * <p>
734      * To enable high speed video recording, application must create a constrained create high speed
735      * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit
736      * a CaptureRequest list created by
737      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
738      * to this session. The application must select the video size from this method and
739      * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
740      * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and
741      * generate the high speed request list. For example, if the application intends to do high
742      * speed recording, it can select the maximum size reported by this method to create high speed
743      * capture session. Note that for the use case of multiple output streams, application must
744      * select one unique size from this method to use (e.g., preview and recording streams must have
745      * the same size). Otherwise, the high speed session creation will fail. Once the size is
746      * selected, application can get the supported FPS ranges by
747      * {@link #getHighSpeedVideoFpsRangesFor}, and use these FPS ranges to setup the recording
748      * request lists via
749      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
750      * </p>
751      *
752      * @return an array of supported high speed video recording sizes
753      * @see #getHighSpeedVideoFpsRangesFor(Size)
754      * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
755      * @see CameraDevice#createConstrainedHighSpeedCaptureSession
756      * @see android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList
757      */
getHighSpeedVideoSizes()758     public Size[] getHighSpeedVideoSizes() {
759         Set<Size> keySet = mHighSpeedVideoSizeMap.keySet();
760         return keySet.toArray(new Size[keySet.size()]);
761     }
762 
763     /**
764      * Get the frame per second ranges (fpsMin, fpsMax) for input high speed video size.
765      * <p>
766      * See {@link #getHighSpeedVideoFpsRanges} for how to enable high speed recording.
767      * </p>
768      * <p>
769      * The {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS ranges} reported in this method
770      * must not be used to setup capture requests that are submitted to unconstrained capture
771      * sessions, or it will result in {@link IllegalArgumentException IllegalArgumentExceptions}.
772      * </p>
773      * <p>
774      * See {@link #getHighSpeedVideoFpsRanges} for the characteristics of the returned FPS ranges.
775      * </p>
776      *
777      * @param size one of the sizes returned by {@link #getHighSpeedVideoSizes()}
778      * @return an array of supported high speed video recording FPS ranges The upper bound of
779      *         returned ranges is guaranteed to be greater than or equal to 120.
780      * @throws IllegalArgumentException if input size does not exist in the return value of
781      *             getHighSpeedVideoSizes
782      * @see #getHighSpeedVideoSizes()
783      * @see #getHighSpeedVideoFpsRanges()
784      */
getHighSpeedVideoFpsRangesFor(Size size)785     public Range<Integer>[] getHighSpeedVideoFpsRangesFor(Size size) {
786         Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
787         if (fpsRangeCount == null || fpsRangeCount == 0) {
788             throw new IllegalArgumentException(String.format(
789                     "Size %s does not support high speed video recording", size));
790         }
791 
792         @SuppressWarnings("unchecked")
793         Range<Integer>[] fpsRanges = new Range[fpsRangeCount];
794         int i = 0;
795         for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
796             if (size.equals(config.getSize())) {
797                 fpsRanges[i++] = config.getFpsRange();
798             }
799         }
800         return fpsRanges;
801     }
802 
803     /**
804      * Get a list of supported high speed video recording FPS ranges.
805      * <p>
806      * When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is
807      * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will
808      * list the supported high speed video FPS range configurations. Application can then use
809      * {@link #getHighSpeedVideoSizesFor} to query available sizes for one of returned FPS range.
810      * </p>
811      * <p>
812      * To enable high speed video recording, application must create a constrained create high speed
813      * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit
814      * a CaptureRequest list created by
815      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
816      * to this session. The application must select the video size from this method and
817      * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
818      * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and
819      * generate the high speed request list. For example, if the application intends to do high
820      * speed recording, it can select one FPS range reported by this method, query the video sizes
821      * corresponding to this FPS range by {@link #getHighSpeedVideoSizesFor} and use one of reported
822      * sizes to create a high speed capture session. Note that for the use case of multiple output
823      * streams, application must select one unique size from this method to use (e.g., preview and
824      * recording streams must have the same size). Otherwise, the high speed session creation will
825      * fail. Once the high speed capture session is created, the application can set the FPS range
826      * in the recording request lists via
827      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
828      * </p>
829      * <p>
830      * The FPS ranges reported by this method will have below characteristics:
831      * <li>The fpsMin and fpsMax will be a multiple 30fps.</li>
832      * <li>The fpsMin will be no less than 30fps, the fpsMax will be no less than 120fps.</li>
833      * <li>At least one range will be a fixed FPS range where fpsMin == fpsMax.</li>
834      * <li>For each fixed FPS range, there will be one corresponding variable FPS range
835      * [30, fps_max] or [60, fps_max]. These kinds of FPS ranges are suitable for preview-only
836      * use cases where the application doesn't want the camera device always produce higher frame
837      * rate than the display refresh rate. Both 30fps and 60fps preview rate will not be
838      * supported for the same recording rate.</li>
839      * </p>
840      *
841      * @return an array of supported high speed video recording FPS ranges The upper bound of
842      *         returned ranges is guaranteed to be larger or equal to 120.
843      * @see #getHighSpeedVideoSizesFor
844      * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
845      * @see CameraDevice#createConstrainedHighSpeedCaptureSession
846      * @see android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList
847      */
848     @SuppressWarnings("unchecked")
getHighSpeedVideoFpsRanges()849     public Range<Integer>[] getHighSpeedVideoFpsRanges() {
850         Set<Range<Integer>> keySet = mHighSpeedVideoFpsRangeMap.keySet();
851         return keySet.toArray(new Range[keySet.size()]);
852     }
853 
854     /**
855      * Get the supported video sizes for an input high speed FPS range.
856      *
857      * <p> See {@link #getHighSpeedVideoSizes} for how to enable high speed recording.</p>
858      *
859      * @param fpsRange one of the FPS range returned by {@link #getHighSpeedVideoFpsRanges()}
860      * @return An array of video sizes to create high speed capture sessions for high speed streaming
861      *         use cases.
862      *
863      * @throws IllegalArgumentException if input FPS range does not exist in the return value of
864      *         getHighSpeedVideoFpsRanges
865      * @see #getHighSpeedVideoFpsRanges()
866      */
getHighSpeedVideoSizesFor(Range<Integer> fpsRange)867     public Size[] getHighSpeedVideoSizesFor(Range<Integer> fpsRange) {
868         Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
869         if (sizeCount == null || sizeCount == 0) {
870             throw new IllegalArgumentException(String.format(
871                     "FpsRange %s does not support high speed video recording", fpsRange));
872         }
873 
874         Size[] sizes = new Size[sizeCount];
875         int i = 0;
876         for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
877             if (fpsRange.equals(config.getFpsRange())) {
878                 sizes[i++] = config.getSize();
879             }
880         }
881         return sizes;
882     }
883 
884     /**
885      * Get a list of supported high resolution sizes, which cannot operate at full BURST_CAPTURE
886      * rate.
887      *
888      * <p>This includes all output sizes that cannot meet the 20 fps frame rate requirements for the
889      * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}
890      * capability.  This does not include the stall duration, so for example, a JPEG or RAW16 output
891      * resolution with a large stall duration but a minimum frame duration that's above 20 fps will
892      * still be listed in the regular {@link #getOutputSizes} list. All the sizes on this list that
893      * are less than 24 megapixels are still guaranteed to operate at a rate of at least 10 fps,
894      * not including stall duration. Sizes on this list that are at least 24 megapixels are allowed
895      * to operate at less than 10 fps.</p>
896      *
897      * <p>For a device that does not support the BURST_CAPTURE capability, this list will be
898      * {@code null}, since resolutions in the {@link #getOutputSizes} list are already not
899      * guaranteed to meet &gt;= 20 fps rate requirements. For a device that does support the
900      * BURST_CAPTURE capability, this list may be empty, if all supported resolutions meet the 20
901      * fps requirement.</p>
902      *
903      * @return an array of supported slower high-resolution sizes, or {@code null} if the
904      *         BURST_CAPTURE capability is not supported
905      */
getHighResolutionOutputSizes(int format)906     public Size[] getHighResolutionOutputSizes(int format) {
907         if (!mListHighResolution) return null;
908 
909         return getPublicFormatSizes(format, /*output*/true, /*highRes*/ true);
910     }
911 
912     /**
913      * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
914      * for the format/size combination (in nanoseconds).
915      *
916      * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
917      * <p>{@code size} should be one of the ones returned by
918      * {@link #getOutputSizes(int)}.</p>
919      *
920      * <p>This corresponds to the minimum frame duration (maximum frame rate) possible when only
921      * that stream is configured in a session, with all processing (typically in
922      * {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.  </p>
923      *
924      * <p>When multiple streams are used in a session, the minimum frame duration will be
925      * {@code max(individual stream min durations)}.  See {@link #getOutputStallDuration} for
926      * details of timing for formats that may cause frame rate slowdown when they are targeted by a
927      * capture request.</p>
928      *
929      * <p>For devices that do not support manual sensor control
930      * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
931      * this function may return 0.</p>
932      *
933      * <p>The minimum frame duration of a stream (of a particular format, size) is the same
934      * regardless of whether the stream is input or output.</p>
935      *
936      * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
937      * @param size an output-compatible size
938      * @return a minimum frame duration {@code >} 0 in nanoseconds, or
939      *          0 if the minimum frame duration is not available.
940      *
941      * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
942      * @throws NullPointerException if {@code size} was {@code null}
943      *
944      * @see CaptureRequest#SENSOR_FRAME_DURATION
945      * @see #getOutputStallDuration(int, Size)
946      * @see ImageFormat
947      * @see PixelFormat
948      */
getOutputMinFrameDuration(int format, Size size)949     public long getOutputMinFrameDuration(int format, Size size) {
950         Objects.requireNonNull(size, "size must not be null");
951         checkArgumentFormatSupported(format, /*output*/true);
952 
953         return getInternalFormatDuration(imageFormatToInternal(format),
954                 imageFormatToDataspace(format),
955                 size,
956                 DURATION_MIN_FRAME);
957     }
958 
959     /**
960      * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
961      * for the class/size combination (in nanoseconds).
962      *
963      * <p>This assumes that the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
964      * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.</p>
965      *
966      * <p>{@code klass} should be one of the ones which is supported by
967      * {@link #isOutputSupportedFor(Class)}.</p>
968      *
969      * <p>{@code size} should be one of the ones returned by
970      * {@link #getOutputSizes(int)}.</p>
971      *
972      * <p>This corresponds to the minimum frame duration (maximum frame rate) possible when only
973      * that stream is configured in a session, with all processing (typically in
974      * {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.  </p>
975      *
976      * <p>When multiple streams are used in a session, the minimum frame duration will be
977      * {@code max(individual stream min durations)}.  See {@link #getOutputStallDuration} for
978      * details of timing for formats that may cause frame rate slowdown when they are targeted by a
979      * capture request.</p>
980      *
981      * <p>For devices that do not support manual sensor control
982      * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
983      * this function may return 0.</p>
984      *
985      * <p>The minimum frame duration of a stream (of a particular format, size) is the same
986      * regardless of whether the stream is input or output.</p>
987      *
988      * @param klass
989      *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
990      *          non-empty array returned by {@link #getOutputSizes(Class)}
991      * @param size an output-compatible size
992      * @return a minimum frame duration {@code >} 0 in nanoseconds, or
993      *          0 if the minimum frame duration is not available.
994      *
995      * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
996      * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
997      *
998      * @see CaptureRequest#SENSOR_FRAME_DURATION
999      * @see ImageFormat
1000      * @see PixelFormat
1001      */
getOutputMinFrameDuration(final Class<T> klass, final Size size)1002     public <T> long getOutputMinFrameDuration(final Class<T> klass, final Size size) {
1003         if (!isOutputSupportedFor(klass)) {
1004             throw new IllegalArgumentException("klass was not supported");
1005         }
1006 
1007         return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1008                 HAL_DATASPACE_UNKNOWN,
1009                 size, DURATION_MIN_FRAME);
1010     }
1011 
1012     /**
1013      * Get the stall duration for the format/size combination (in nanoseconds).
1014      *
1015      * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
1016      * <p>{@code size} should be one of the ones returned by
1017      * {@link #getOutputSizes(int)}.</p>
1018      *
1019      * <p>
1020      * A stall duration is how much extra time would get added to the normal minimum frame duration
1021      * for a repeating request that has streams with non-zero stall.
1022      *
1023      * <p>For example, consider JPEG captures which have the following characteristics:
1024      *
1025      * <ul>
1026      * <li>JPEG streams act like processed YUV streams in requests for which they are not included;
1027      * in requests in which they are directly referenced, they act as JPEG streams.
1028      * This is because supporting a JPEG stream requires the underlying YUV data to always be ready
1029      * for use by a JPEG encoder, but the encoder will only be used (and impact frame duration) on
1030      * requests that actually reference a JPEG stream.
1031      * <li>The JPEG processor can run concurrently to the rest of the camera pipeline, but cannot
1032      * process more than 1 capture at a time.
1033      * </ul>
1034      *
1035      * <p>In other words, using a repeating YUV request would result in a steady frame rate
1036      * (let's say it's 30 FPS). If a single JPEG request is submitted periodically,
1037      * the frame rate will stay at 30 FPS (as long as we wait for the previous JPEG to return each
1038      * time). If we try to submit a repeating YUV + JPEG request, then the frame rate will drop from
1039      * 30 FPS.</p>
1040      *
1041      * <p>In general, submitting a new request with a non-0 stall time stream will <em>not</em> cause a
1042      * frame rate drop unless there are still outstanding buffers for that stream from previous
1043      * requests.</p>
1044      *
1045      * <p>Submitting a repeating request with streams (call this {@code S}) is the same as setting
1046      * the minimum frame duration from the normal minimum frame duration corresponding to {@code S},
1047      * added with the maximum stall duration for {@code S}.</p>
1048      *
1049      * <p>If interleaving requests with and without a stall duration, a request will stall by the
1050      * maximum of the remaining times for each can-stall stream with outstanding buffers.</p>
1051      *
1052      * <p>This means that a stalling request will not have an exposure start until the stall has
1053      * completed.</p>
1054      *
1055      * <p>This should correspond to the stall duration when only that stream is active, with all
1056      * processing (typically in {@code android.*.mode}) set to {@code FAST} or {@code OFF}.
1057      * Setting any of the processing modes to {@code HIGH_QUALITY} effectively results in an
1058      * indeterminate stall duration for all streams in a request (the regular stall calculation
1059      * rules are ignored).</p>
1060      *
1061      * <p>The following formats may always have a stall duration:
1062      * <ul>
1063      * <li>{@link ImageFormat#JPEG JPEG}
1064      * <li>{@link ImageFormat#RAW_SENSOR RAW16}
1065      * <li>{@link ImageFormat#RAW_PRIVATE RAW_PRIVATE}
1066      * </ul>
1067      * </p>
1068      *
1069      * <p>The following formats will never have a stall duration:
1070      * <ul>
1071      * <li>{@link ImageFormat#YUV_420_888 YUV_420_888}
1072      * <li>{@link ImageFormat#PRIVATE PRIVATE}
1073      * </ul></p>
1074      *
1075      * <p>
1076      * All other formats may or may not have an allowed stall duration on a per-capability basis;
1077      * refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
1078      * android.request.availableCapabilities} for more details.</p>
1079      * </p>
1080      *
1081      * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}
1082      * for more information about calculating the max frame rate (absent stalls).</p>
1083      *
1084      * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
1085      * @param size an output-compatible size
1086      * @return a stall duration {@code >=} 0 in nanoseconds
1087      *
1088      * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
1089      * @throws NullPointerException if {@code size} was {@code null}
1090      *
1091      * @see CaptureRequest#SENSOR_FRAME_DURATION
1092      * @see ImageFormat
1093      * @see PixelFormat
1094      */
getOutputStallDuration(int format, Size size)1095     public long getOutputStallDuration(int format, Size size) {
1096         checkArgumentFormatSupported(format, /*output*/true);
1097 
1098         return getInternalFormatDuration(imageFormatToInternal(format),
1099                 imageFormatToDataspace(format),
1100                 size,
1101                 DURATION_STALL);
1102     }
1103 
1104     /**
1105      * Get the stall duration for the class/size combination (in nanoseconds).
1106      *
1107      * <p>This assumes that the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
1108      * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.</p>
1109      *
1110      * <p>{@code klass} should be one of the ones with a non-empty array returned by
1111      * {@link #getOutputSizes(Class)}.</p>
1112      *
1113      * <p>{@code size} should be one of the ones returned by
1114      * {@link #getOutputSizes(Class)}.</p>
1115      *
1116      * <p>See {@link #getOutputStallDuration(int, Size)} for a definition of a
1117      * <em>stall duration</em>.</p>
1118      *
1119      * @param klass
1120      *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
1121      *          non-empty array returned by {@link #getOutputSizes(Class)}
1122      * @param size an output-compatible size
1123      * @return a minimum frame duration {@code >=} 0 in nanoseconds
1124      *
1125      * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
1126      * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
1127      *
1128      * @see CaptureRequest#SENSOR_FRAME_DURATION
1129      * @see ImageFormat
1130      * @see PixelFormat
1131      */
getOutputStallDuration(final Class<T> klass, final Size size)1132     public <T> long getOutputStallDuration(final Class<T> klass, final Size size) {
1133         if (!isOutputSupportedFor(klass)) {
1134             throw new IllegalArgumentException("klass was not supported");
1135         }
1136 
1137         return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1138                 HAL_DATASPACE_UNKNOWN, size, DURATION_STALL);
1139     }
1140 
1141     /**
1142      * Check if this {@link StreamConfigurationMap} is equal to another
1143      * {@link StreamConfigurationMap}.
1144      *
1145      * <p>Two vectors are only equal if and only if each of the respective elements is equal.</p>
1146      *
1147      * @return {@code true} if the objects were equal, {@code false} otherwise
1148      */
1149     @Override
equals(final Object obj)1150     public boolean equals(final Object obj) {
1151         if (obj == null) {
1152             return false;
1153         }
1154         if (this == obj) {
1155             return true;
1156         }
1157         if (obj instanceof StreamConfigurationMap) {
1158             final StreamConfigurationMap other = (StreamConfigurationMap) obj;
1159             // XX: do we care about order?
1160             return Arrays.equals(mConfigurations, other.mConfigurations) &&
1161                     Arrays.equals(mMinFrameDurations, other.mMinFrameDurations) &&
1162                     Arrays.equals(mStallDurations, other.mStallDurations) &&
1163                     Arrays.equals(mDepthConfigurations, other.mDepthConfigurations) &&
1164                     Arrays.equals(mDepthMinFrameDurations, other.mDepthMinFrameDurations) &&
1165                     Arrays.equals(mDepthStallDurations, other.mDepthStallDurations) &&
1166                     Arrays.equals(mDynamicDepthConfigurations, other.mDynamicDepthConfigurations) &&
1167                     Arrays.equals(mDynamicDepthMinFrameDurations,
1168                             other.mDynamicDepthMinFrameDurations) &&
1169                     Arrays.equals(mDynamicDepthStallDurations, other.mDynamicDepthStallDurations) &&
1170                     Arrays.equals(mHeicConfigurations, other.mHeicConfigurations) &&
1171                     Arrays.equals(mHeicMinFrameDurations, other.mHeicMinFrameDurations) &&
1172                     Arrays.equals(mHeicStallDurations, other.mHeicStallDurations) &&
1173                     Arrays.equals(mJpegRConfigurations, other.mJpegRConfigurations) &&
1174                     Arrays.equals(mJpegRMinFrameDurations, other.mJpegRMinFrameDurations) &&
1175                     Arrays.equals(mJpegRStallDurations, other.mJpegRStallDurations) &&
1176                     Arrays.equals(mHighSpeedVideoConfigurations,
1177                             other.mHighSpeedVideoConfigurations);
1178         }
1179         return false;
1180     }
1181 
    /**
     * {@inheritDoc}
     */
    @Override
    public int hashCode() {
        // Hash over the same arrays, in the same order, as compared in equals() so that
        // equal maps always hash equally. Order affects the hash value, mirroring the
        // positional comparison done by equals().
        return HashCodeHelpers.hashCodeGeneric(
                mConfigurations, mMinFrameDurations, mStallDurations,
                mDepthConfigurations, mDepthMinFrameDurations, mDepthStallDurations,
                mDynamicDepthConfigurations, mDynamicDepthMinFrameDurations,
                mDynamicDepthStallDurations, mHeicConfigurations,
                mHeicMinFrameDurations, mHeicStallDurations,
                mJpegRConfigurations, mJpegRMinFrameDurations, mJpegRStallDurations,
                mHighSpeedVideoConfigurations);
    }
1197 
1198     // Check that the argument is supported by #getOutputFormats or #getInputFormats
checkArgumentFormatSupported(int format, boolean output)1199     private int checkArgumentFormatSupported(int format, boolean output) {
1200         checkArgumentFormat(format);
1201 
1202         int internalFormat = imageFormatToInternal(format);
1203         int internalDataspace = imageFormatToDataspace(format);
1204 
1205         if (output) {
1206             if (internalDataspace == HAL_DATASPACE_DEPTH) {
1207                 if (mDepthOutputFormats.indexOfKey(internalFormat) >= 0) {
1208                     return format;
1209                 }
1210             } else if (internalDataspace == HAL_DATASPACE_DYNAMIC_DEPTH) {
1211                 if (mDynamicDepthOutputFormats.indexOfKey(internalFormat) >= 0) {
1212                     return format;
1213                 }
1214             } else if (internalDataspace == HAL_DATASPACE_HEIF) {
1215                 if (mHeicOutputFormats.indexOfKey(internalFormat) >= 0) {
1216                     return format;
1217                 }
1218             } else if (internalDataspace == HAL_DATASPACE_JPEG_R) {
1219                 if (mJpegROutputFormats.indexOfKey(internalFormat) >= 0) {
1220                     return format;
1221                 }
1222             } else {
1223                 if (mAllOutputFormats.indexOfKey(internalFormat) >= 0) {
1224                     return format;
1225                 }
1226             }
1227         } else {
1228             if (mInputFormats.indexOfKey(internalFormat) >= 0) {
1229                 return format;
1230             }
1231         }
1232 
1233         throw new IllegalArgumentException(String.format(
1234                 "format %x is not supported by this stream configuration map", format));
1235     }
1236 
1237     /**
1238      * Ensures that the format is either user-defined or implementation defined.
1239      *
1240      * <p>If a format has a different internal representation than the public representation,
1241      * passing in the public representation here will fail.</p>
1242      *
1243      * <p>For example if trying to use {@link ImageFormat#JPEG}:
1244      * it has a different public representation than the internal representation
1245      * {@code HAL_PIXEL_FORMAT_BLOB}, this check will fail.</p>
1246      *
1247      * <p>Any invalid/undefined formats will raise an exception.</p>
1248      *
1249      * @param format image format
1250      * @return the format
1251      *
1252      * @throws IllegalArgumentException if the format was invalid
1253      */
checkArgumentFormatInternal(int format)1254     static int checkArgumentFormatInternal(int format) {
1255         switch (format) {
1256             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1257             case HAL_PIXEL_FORMAT_BLOB:
1258             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1259             case HAL_PIXEL_FORMAT_Y16:
1260                 return format;
1261             case ImageFormat.JPEG:
1262             case ImageFormat.HEIC:
1263                 throw new IllegalArgumentException(
1264                         "An unknown internal format: " + format);
1265             default:
1266                 return checkArgumentFormat(format);
1267         }
1268     }
1269 
1270     /**
1271      * Ensures that the format is publicly user-defined in either ImageFormat or PixelFormat.
1272      *
1273      * <p>If a format has a different public representation than the internal representation,
1274      * passing in the internal representation here will fail.</p>
1275      *
1276      * <p>For example if trying to use {@code HAL_PIXEL_FORMAT_BLOB}:
1277      * it has a different internal representation than the public representation
1278      * {@link ImageFormat#JPEG}, this check will fail.</p>
1279      *
1280      * <p>Any invalid/undefined formats will raise an exception, including implementation-defined.
1281      * </p>
1282      *
1283      * <p>Note that {@code @hide} and deprecated formats will not pass this check.</p>
1284      *
1285      * @param format image format
1286      * @return the format
1287      *
1288      * @throws IllegalArgumentException if the format was not user-defined
1289      */
checkArgumentFormat(int format)1290     static int checkArgumentFormat(int format) {
1291         if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) {
1292             throw new IllegalArgumentException(String.format(
1293                     "format 0x%x was not defined in either ImageFormat or PixelFormat", format));
1294         }
1295 
1296         return format;
1297     }
1298 
1299     /**
1300      * Convert an internal format compatible with {@code graphics.h} into public-visible
1301      * {@code ImageFormat}. This assumes the dataspace of the format is not HAL_DATASPACE_DEPTH.
1302      *
1303      * <p>In particular these formats are converted:
1304      * <ul>
1305      * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.JPEG</li>
1306      * </ul>
1307      * </p>
1308      *
1309      * <p>Passing in a format which has no public equivalent will fail;
1310      * as will passing in a public format which has a different internal format equivalent.
1311      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1312      *
1313      * <p>All other formats are returned as-is, no further invalid check is performed.</p>
1314      *
1315      * <p>This function is the dual of {@link #imageFormatToInternal} for dataspaces other than
1316      * HAL_DATASPACE_DEPTH.</p>
1317      *
1318      * @param format image format from {@link ImageFormat} or {@link PixelFormat}
1319      * @return the converted image formats
1320      *
1321      * @throws IllegalArgumentException
1322      *          if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
1323      *          {@link ImageFormat#JPEG}
1324      *
1325      * @see ImageFormat
1326      * @see PixelFormat
1327      * @see #checkArgumentFormat
1328      * @hide
1329      */
imageFormatToPublic(int format)1330     public static int imageFormatToPublic(int format) {
1331         switch (format) {
1332             case HAL_PIXEL_FORMAT_BLOB:
1333                 return ImageFormat.JPEG;
1334             case ImageFormat.JPEG:
1335                 throw new IllegalArgumentException(
1336                         "ImageFormat.JPEG is an unknown internal format");
1337             default:
1338                 return format;
1339         }
1340     }
1341 
1342     /**
1343      * Convert an internal format compatible with {@code graphics.h} into public-visible
1344      * {@code ImageFormat}. This assumes the dataspace of the format is HAL_DATASPACE_DEPTH.
1345      *
1346      * <p>In particular these formats are converted:
1347      * <ul>
1348      * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.DEPTH_POINT_CLOUD
1349      * <li>HAL_PIXEL_FORMAT_Y16 => ImageFormat.DEPTH16
1350      * </ul>
1351      * </p>
1352      *
1353      * <p>Passing in an implementation-defined format which has no public equivalent will fail;
1354      * as will passing in a public format which has a different internal format equivalent.
1355      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1356      *
1357      * <p>All other formats are returned as-is, no further invalid check is performed.</p>
1358      *
1359      * <p>This function is the dual of {@link #imageFormatToInternal} for formats associated with
1360      * HAL_DATASPACE_DEPTH.</p>
1361      *
1362      * @param format image format from {@link ImageFormat} or {@link PixelFormat}
1363      * @return the converted image formats
1364      *
1365      * @throws IllegalArgumentException
1366      *          if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
1367      *          {@link ImageFormat#JPEG}
1368      *
1369      * @see ImageFormat
1370      * @see PixelFormat
1371      * @see #checkArgumentFormat
1372      * @hide
1373      */
depthFormatToPublic(int format)1374     public static int depthFormatToPublic(int format) {
1375         switch (format) {
1376             case HAL_PIXEL_FORMAT_BLOB:
1377                 return ImageFormat.DEPTH_POINT_CLOUD;
1378             case HAL_PIXEL_FORMAT_Y16:
1379                 return ImageFormat.DEPTH16;
1380             case HAL_PIXEL_FORMAT_RAW16:
1381                 return ImageFormat.RAW_DEPTH;
1382             case HAL_PIXEL_FORMAT_RAW10:
1383                 return ImageFormat.RAW_DEPTH10;
1384             case ImageFormat.JPEG:
1385                 throw new IllegalArgumentException(
1386                         "ImageFormat.JPEG is an unknown internal format");
1387             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1388                 throw new IllegalArgumentException(
1389                         "IMPLEMENTATION_DEFINED must not leak to public API");
1390             default:
1391                 throw new IllegalArgumentException(
1392                         "Unknown DATASPACE_DEPTH format " + format);
1393         }
1394     }
1395 
1396     /**
1397      * Convert image formats from internal to public formats (in-place).
1398      *
1399      * @param formats an array of image formats
1400      * @return {@code formats}
1401      *
1402      * @see #imageFormatToPublic
1403      */
imageFormatToPublic(int[] formats)1404     static int[] imageFormatToPublic(int[] formats) {
1405         if (formats == null) {
1406             return null;
1407         }
1408 
1409         for (int i = 0; i < formats.length; ++i) {
1410             formats[i] = imageFormatToPublic(formats[i]);
1411         }
1412 
1413         return formats;
1414     }
1415 
1416     /**
1417      * Convert a public format compatible with {@code ImageFormat} to an internal format
1418      * from {@code graphics.h}.
1419      *
1420      * <p>In particular these formats are converted:
1421      * <ul>
1422      * <li>ImageFormat.JPEG => HAL_PIXEL_FORMAT_BLOB
1423      * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_PIXEL_FORMAT_BLOB
1424      * <li>ImageFormat.DEPTH_JPEG => HAL_PIXEL_FORMAT_BLOB
1425      * <li>ImageFormat.HEIC => HAL_PIXEL_FORMAT_BLOB
1426      * <li>ImageFormat.JPEG_R => HAL_PIXEL_FORMAT_BLOB
1427      * <li>ImageFormat.DEPTH16 => HAL_PIXEL_FORMAT_Y16
1428      * </ul>
1429      * </p>
1430      *
1431      * <p>Passing in an internal format which has a different public format equivalent will fail.
1432      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1433      *
1434      * <p>All other formats are returned as-is, no invalid check is performed.</p>
1435      *
1436      * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
1437      *
1438      * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
1439      * @return the converted image formats
1440      *
1441      * @see ImageFormat
1442      * @see PixelFormat
1443      *
1444      * @throws IllegalArgumentException
1445      *              if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
1446      */
imageFormatToInternal(int format)1447     static int imageFormatToInternal(int format) {
1448         switch (format) {
1449             case ImageFormat.JPEG:
1450             case ImageFormat.DEPTH_POINT_CLOUD:
1451             case ImageFormat.DEPTH_JPEG:
1452             case ImageFormat.HEIC:
1453             case ImageFormat.JPEG_R:
1454                 return HAL_PIXEL_FORMAT_BLOB;
1455             case ImageFormat.DEPTH16:
1456                 return HAL_PIXEL_FORMAT_Y16;
1457             case ImageFormat.RAW_DEPTH:
1458                 return HAL_PIXEL_FORMAT_RAW16;
1459             case ImageFormat.RAW_DEPTH10:
1460                 return HAL_PIXEL_FORMAT_RAW10;
1461             default:
1462                 return format;
1463         }
1464     }
1465 
1466     /**
1467      * Convert a public format compatible with {@code ImageFormat} to an internal dataspace
1468      * from {@code graphics.h}.
1469      *
1470      * <p>In particular these formats are converted:
1471      * <ul>
1472      * <li>ImageFormat.JPEG => HAL_DATASPACE_V0_JFIF
1473      * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_DATASPACE_DEPTH
1474      * <li>ImageFormat.DEPTH16 => HAL_DATASPACE_DEPTH
1475      * <li>ImageFormat.DEPTH_JPEG => HAL_DATASPACE_DYNAMIC_DEPTH
1476      * <li>ImageFormat.HEIC => HAL_DATASPACE_HEIF
1477      * <li>ImageFormat.JPEG_R => HAL_DATASPACE_JPEG_R
1478      * <li>others => HAL_DATASPACE_UNKNOWN
1479      * </ul>
1480      * </p>
1481      *
1482      * <p>Passing in an implementation-defined format here will fail (it's not a public format);
1483      * as will passing in an internal format which has a different public format equivalent.
1484      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1485      *
1486      * <p>All other formats are returned as-is, no invalid check is performed.</p>
1487      *
1488      * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
1489      *
1490      * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
1491      * @return the converted image formats
1492      *
1493      * @see ImageFormat
1494      * @see PixelFormat
1495      *
1496      * @throws IllegalArgumentException
1497      *              if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
1498      */
imageFormatToDataspace(int format)1499     static int imageFormatToDataspace(int format) {
1500         switch (format) {
1501             case ImageFormat.JPEG:
1502                 return HAL_DATASPACE_V0_JFIF;
1503             case ImageFormat.DEPTH_POINT_CLOUD:
1504             case ImageFormat.DEPTH16:
1505             case ImageFormat.RAW_DEPTH:
1506             case ImageFormat.RAW_DEPTH10:
1507                 return HAL_DATASPACE_DEPTH;
1508             case ImageFormat.DEPTH_JPEG:
1509                 return HAL_DATASPACE_DYNAMIC_DEPTH;
1510             case ImageFormat.HEIC:
1511                 return HAL_DATASPACE_HEIF;
1512             case ImageFormat.JPEG_R:
1513                 return HAL_DATASPACE_JPEG_R;
1514             default:
1515                 return HAL_DATASPACE_UNKNOWN;
1516         }
1517     }
1518 
1519     /**
1520      * Convert image formats from public to internal formats (in-place).
1521      *
1522      * @param formats an array of image formats
1523      * @return {@code formats}
1524      *
1525      * @see #imageFormatToInternal
1526      *
1527      * @hide
1528      */
imageFormatToInternal(int[] formats)1529     public static int[] imageFormatToInternal(int[] formats) {
1530         if (formats == null) {
1531             return null;
1532         }
1533 
1534         for (int i = 0; i < formats.length; ++i) {
1535             formats[i] = imageFormatToInternal(formats[i]);
1536         }
1537 
1538         return formats;
1539     }
1540 
getPublicFormatSizes(int format, boolean output, boolean highRes)1541     private Size[] getPublicFormatSizes(int format, boolean output, boolean highRes) {
1542         try {
1543             checkArgumentFormatSupported(format, output);
1544         } catch (IllegalArgumentException e) {
1545             return null;
1546         }
1547 
1548         int internalFormat = imageFormatToInternal(format);
1549         int dataspace = imageFormatToDataspace(format);
1550 
1551         return getInternalFormatSizes(internalFormat, dataspace, output, highRes);
1552     }
1553 
    /**
     * Enumerate the sizes available for an internal (format, dataspace) pair.
     *
     * <p>Returns {@code null} if the format has no entries in the relevant format-count map,
     * an empty array for depth + highRes (depth formats are never high-res), and otherwise
     * the matching sizes gathered from the per-dataspace configuration array. Throws
     * {@link AssertionError} if the gathered count disagrees with the count map in a way
     * the dataspace does not permit.</p>
     */
    private Size[] getInternalFormatSizes(int format, int dataspace,
            boolean output, boolean highRes) {
        // All depth formats are non-high-res.
        if (dataspace == HAL_DATASPACE_DEPTH && highRes) {
            return new Size[0];
        }

        // Select the format -> size-count map for this direction/dataspace/resolution class.
        SparseIntArray formatsMap =
                !output ? mInputFormats :
                dataspace == HAL_DATASPACE_DEPTH ? mDepthOutputFormats :
                dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthOutputFormats :
                dataspace == HAL_DATASPACE_HEIF ? mHeicOutputFormats :
                dataspace == HAL_DATASPACE_JPEG_R ? mJpegROutputFormats :
                highRes ? mHighResOutputFormats :
                mOutputFormats;

        // For inputs and the special dataspaces the per-dataspace count is authoritative;
        // for regular outputs fall back to the all-outputs map (the highRes split may leave
        // one of the two resolution maps legitimately at zero).
        int sizesCount = formatsMap.get(format);
        if ( ((!output || (dataspace == HAL_DATASPACE_DEPTH || dataspace == HAL_DATASPACE_JPEG_R ||
                            dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ||
                            dataspace == HAL_DATASPACE_HEIF)) && sizesCount == 0) ||
                (output && (dataspace != HAL_DATASPACE_DEPTH && dataspace != HAL_DATASPACE_JPEG_R &&
                            dataspace != HAL_DATASPACE_DYNAMIC_DEPTH &&
                            dataspace != HAL_DATASPACE_HEIF) &&
                 mAllOutputFormats.get(format) == 0)) {
            return null;
        }

        Size[] sizes = new Size[sizesCount];
        int sizeIndex = 0;

        // Configurations and min-frame-durations come from the matching dataspace arrays.
        StreamConfiguration[] configurations =
                (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations :
                (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthConfigurations :
                (dataspace == HAL_DATASPACE_HEIF) ? mHeicConfigurations :
                (dataspace == HAL_DATASPACE_JPEG_R) ? mJpegRConfigurations :
                mConfigurations;
        StreamConfigurationDuration[] minFrameDurations =
                (dataspace == HAL_DATASPACE_DEPTH) ? mDepthMinFrameDurations :
                (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthMinFrameDurations :
                (dataspace == HAL_DATASPACE_HEIF) ? mHeicMinFrameDurations :
                (dataspace == HAL_DATASPACE_JPEG_R) ? mJpegRMinFrameDurations :
                mMinFrameDurations;

        for (StreamConfiguration config : configurations) {
            int fmt = config.getFormat();
            if (fmt == format && config.isOutput() == output) {
                if (output && mListHighResolution) {
                    // Filter slow high-res output formats; include for
                    // highRes, remove for !highRes
                    long duration = 0;
                    for (int i = 0; i < minFrameDurations.length; i++) {
                        StreamConfigurationDuration d = minFrameDurations[i];
                        if (d.getFormat() == fmt &&
                                d.getWidth() == config.getSize().getWidth() &&
                                d.getHeight() == config.getSize().getHeight()) {
                            duration = d.getDuration();
                            break;
                        }
                    }
                    // No duration entry found => duration 0 => treated as fast (<= 20fps cap).
                    if (dataspace != HAL_DATASPACE_DEPTH &&
                            highRes != (duration > DURATION_20FPS_NS)) {
                        continue;
                    }
                }
                sizes[sizeIndex++] = config.getSize();
            }
        }

        // Dynamic depth streams can have both fast and also high res modes.
        // NOTE(review): '&&' binds tighter than '||', so this branch is taken for JPEG_R even
        // when sizeIndex == sizesCount — presumably intentional (JPEG_R always trims), but
        // worth confirming against the upstream history.
        if ((sizeIndex != sizesCount) && (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ||
                dataspace == HAL_DATASPACE_HEIF) || (dataspace == HAL_DATASPACE_JPEG_R)) {

            if (sizeIndex > sizesCount) {
                throw new AssertionError(
                        "Too many dynamic depth sizes (expected " + sizesCount + ", actual " +
                        sizeIndex + ")");
            }

            // Trim the array down to the number of sizes actually collected.
            if (sizeIndex <= 0) {
                sizes = new Size[0];
            } else {
                sizes = Arrays.copyOf(sizes, sizeIndex);
            }
        } else if (sizeIndex != sizesCount) {
            throw new AssertionError(
                    "Too few sizes (expected " + sizesCount + ", actual " + sizeIndex + ")");
        }

        return sizes;
    }
1644 
1645     /** Get the list of publicly visible output formats */
getPublicFormats(boolean output)1646     private int[] getPublicFormats(boolean output) {
1647         int[] formats = new int[getPublicFormatCount(output)];
1648 
1649         int i = 0;
1650 
1651         SparseIntArray map = getFormatsMap(output);
1652         for (int j = 0; j < map.size(); j++) {
1653             int format = map.keyAt(j);
1654             formats[i++] = imageFormatToPublic(format);
1655         }
1656         if (output) {
1657             for (int j = 0; j < mDepthOutputFormats.size(); j++) {
1658                 formats[i++] = depthFormatToPublic(mDepthOutputFormats.keyAt(j));
1659             }
1660             if (mDynamicDepthOutputFormats.size() > 0) {
1661                 // Only one publicly dynamic depth format is available.
1662                 formats[i++] = ImageFormat.DEPTH_JPEG;
1663             }
1664             if (mHeicOutputFormats.size() > 0) {
1665                 formats[i++] = ImageFormat.HEIC;
1666             }
1667             if (mJpegROutputFormats.size() > 0) {
1668                 formats[i++] = ImageFormat.JPEG_R;
1669             }
1670         }
1671         if (formats.length != i) {
1672             throw new AssertionError("Too few formats " + i + ", expected " + formats.length);
1673         }
1674 
1675         return formats;
1676     }
1677 
1678     /** Get the format -> size count map for either output or input formats */
getFormatsMap(boolean output)1679     private SparseIntArray getFormatsMap(boolean output) {
1680         return output ? mAllOutputFormats : mInputFormats;
1681     }
1682 
getInternalFormatDuration(int format, int dataspace, Size size, int duration)1683     private long getInternalFormatDuration(int format, int dataspace, Size size, int duration) {
1684         // assume format is already checked, since its internal
1685 
1686         if (!isSupportedInternalConfiguration(format, dataspace, size)) {
1687             throw new IllegalArgumentException("size was not supported");
1688         }
1689 
1690         StreamConfigurationDuration[] durations = getDurations(duration, dataspace);
1691 
1692         for (StreamConfigurationDuration configurationDuration : durations) {
1693             if (configurationDuration.getFormat() == format &&
1694                     configurationDuration.getWidth() == size.getWidth() &&
1695                     configurationDuration.getHeight() == size.getHeight()) {
1696                 return configurationDuration.getDuration();
1697             }
1698         }
1699         // Default duration is '0' (unsupported/no extra stall)
1700         return 0;
1701     }
1702 
1703     /**
1704      * Get the durations array for the kind of duration
1705      *
1706      * @see #DURATION_MIN_FRAME
1707      * @see #DURATION_STALL
1708      * */
getDurations(int duration, int dataspace)1709     private StreamConfigurationDuration[] getDurations(int duration, int dataspace) {
1710         switch (duration) {
1711             case DURATION_MIN_FRAME:
1712                 return (dataspace == HAL_DATASPACE_DEPTH) ? mDepthMinFrameDurations :
1713                         (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ?
1714                         mDynamicDepthMinFrameDurations :
1715                         (dataspace == HAL_DATASPACE_HEIF) ? mHeicMinFrameDurations :
1716                         (dataspace == HAL_DATASPACE_JPEG_R) ? mJpegRMinFrameDurations :
1717                         mMinFrameDurations;
1718 
1719             case DURATION_STALL:
1720                 return (dataspace == HAL_DATASPACE_DEPTH) ? mDepthStallDurations :
1721                         (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthStallDurations :
1722                         (dataspace == HAL_DATASPACE_HEIF) ? mHeicStallDurations :
1723                         (dataspace == HAL_DATASPACE_JPEG_R) ? mJpegRStallDurations :
1724                         mStallDurations;
1725             default:
1726                 throw new IllegalArgumentException("duration was invalid");
1727         }
1728     }
1729 
1730     /** Count the number of publicly-visible output formats */
getPublicFormatCount(boolean output)1731     private int getPublicFormatCount(boolean output) {
1732         SparseIntArray formatsMap = getFormatsMap(output);
1733         int size = formatsMap.size();
1734         if (output) {
1735             size += mDepthOutputFormats.size();
1736             size += mDynamicDepthOutputFormats.size();
1737             size += mHeicOutputFormats.size();
1738             size += mJpegROutputFormats.size();
1739         }
1740 
1741         return size;
1742     }
1743 
arrayContains(T[] array, T element)1744     private static <T> boolean arrayContains(T[] array, T element) {
1745         if (array == null) {
1746             return false;
1747         }
1748 
1749         for (T el : array) {
1750             if (Objects.equals(el, element)) {
1751                 return true;
1752             }
1753         }
1754 
1755         return false;
1756     }
1757 
isSupportedInternalConfiguration(int format, int dataspace, Size size)1758     private boolean isSupportedInternalConfiguration(int format, int dataspace, Size size) {
1759         StreamConfiguration[] configurations =
1760                 (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations :
1761                 (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthConfigurations :
1762                 (dataspace == HAL_DATASPACE_HEIF) ? mHeicConfigurations :
1763                 (dataspace == HAL_DATASPACE_JPEG_R) ? mJpegRConfigurations :
1764                 mConfigurations;
1765 
1766         for (int i = 0; i < configurations.length; i++) {
1767             if (configurations[i].getFormat() == format &&
1768                     configurations[i].getSize().equals(size)) {
1769                 return true;
1770             }
1771         }
1772 
1773         return false;
1774     }
1775 
1776     /**
1777      * Return this {@link StreamConfigurationMap} as a string representation.
1778      *
1779      * <p>{@code "StreamConfigurationMap(Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d,
1780      * stall:%d], ... [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]), Inputs([w:%d, h:%d,
1781      * format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)]), ValidOutputFormatsForInput(
1782      * [in:%d, out:%d, ... %d], ... [in:%d, out:%d, ... %d]), HighSpeedVideoConfigurations(
1783      * [w:%d, h:%d, min_fps:%d, max_fps:%d], ... [w:%d, h:%d, min_fps:%d, max_fps:%d]))"}.</p>
1784      *
1785      * <p>{@code Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d], ...
1786      * [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d])}, where
1787      * {@code [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]} represents an output
1788      * configuration's width, height, format, minimal frame duration in nanoseconds, and stall
1789      * duration in nanoseconds.</p>
1790      *
1791      * <p>{@code Inputs([w:%d, h:%d, format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)])}, where
1792      * {@code [w:%d, h:%d, format:%s(%d)]} represents an input configuration's width, height, and
1793      * format.</p>
1794      *
1795      * <p>{@code ValidOutputFormatsForInput([in:%s(%d), out:%s(%d), ... %s(%d)],
1796      * ... [in:%s(%d), out:%s(%d), ... %s(%d)])}, where {@code [in:%s(%d), out:%s(%d), ... %s(%d)]}
1797      * represents an input format and its valid output formats.</p>
1798      *
1799      * <p>{@code HighSpeedVideoConfigurations([w:%d, h:%d, min_fps:%d, max_fps:%d],
1800      * ... [w:%d, h:%d, min_fps:%d, max_fps:%d])}, where
1801      * {@code [w:%d, h:%d, min_fps:%d, max_fps:%d]} represents a high speed video output
1802      * configuration's width, height, minimal frame rate, and maximal frame rate.</p>
1803      *
1804      * @return string representation of {@link StreamConfigurationMap}
1805      */
1806     @Override
toString()1807     public String toString() {
1808         StringBuilder sb = new StringBuilder("StreamConfiguration(");
1809         appendOutputsString(sb);
1810         sb.append(", ");
1811         appendHighResOutputsString(sb);
1812         sb.append(", ");
1813         appendInputsString(sb);
1814         sb.append(", ");
1815         appendValidOutputFormatsForInputString(sb);
1816         sb.append(", ");
1817         appendHighSpeedVideoConfigurationsString(sb);
1818         sb.append(")");
1819 
1820         return sb.toString();
1821     }
1822 
1823     /**
1824      * Size comparison method used by size comparators.
1825      *
1826      * @hide
1827      */
compareSizes(int widthA, int heightA, int widthB, int heightB)1828     public static int compareSizes(int widthA, int heightA, int widthB, int heightB) {
1829         long left = widthA * (long) heightA;
1830         long right = widthB * (long) heightB;
1831         if (left == right) {
1832             left = widthA;
1833             right = widthB;
1834         }
1835         return (left < right) ? -1 : (left > right ? 1 : 0);
1836     }
1837 
appendOutputsString(StringBuilder sb)1838     private void appendOutputsString(StringBuilder sb) {
1839         sb.append("Outputs(");
1840         int[] formats = getOutputFormats();
1841         for (int format : formats) {
1842             Size[] sizes = getOutputSizes(format);
1843             for (Size size : sizes) {
1844                 long minFrameDuration = getOutputMinFrameDuration(format, size);
1845                 long stallDuration = getOutputStallDuration(format, size);
1846                 sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
1847                         "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
1848                         format, minFrameDuration, stallDuration));
1849             }
1850         }
1851         // Remove the pending ", "
1852         if (sb.charAt(sb.length() - 1) == ' ') {
1853             sb.delete(sb.length() - 2, sb.length());
1854         }
1855         sb.append(")");
1856     }
1857 
appendHighResOutputsString(StringBuilder sb)1858     private void appendHighResOutputsString(StringBuilder sb) {
1859         sb.append("HighResolutionOutputs(");
1860         int[] formats = getOutputFormats();
1861         for (int format : formats) {
1862             Size[] sizes = getHighResolutionOutputSizes(format);
1863             if (sizes == null) continue;
1864             for (Size size : sizes) {
1865                 long minFrameDuration = getOutputMinFrameDuration(format, size);
1866                 long stallDuration = getOutputStallDuration(format, size);
1867                 sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
1868                         "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
1869                         format, minFrameDuration, stallDuration));
1870             }
1871         }
1872         // Remove the pending ", "
1873         if (sb.charAt(sb.length() - 1) == ' ') {
1874             sb.delete(sb.length() - 2, sb.length());
1875         }
1876         sb.append(")");
1877     }
1878 
appendInputsString(StringBuilder sb)1879     private void appendInputsString(StringBuilder sb) {
1880         sb.append("Inputs(");
1881         int[] formats = getInputFormats();
1882         for (int format : formats) {
1883             Size[] sizes = getInputSizes(format);
1884             for (Size size : sizes) {
1885                 sb.append(String.format("[w:%d, h:%d, format:%s(%d)], ", size.getWidth(),
1886                         size.getHeight(), formatToString(format), format));
1887             }
1888         }
1889         // Remove the pending ", "
1890         if (sb.charAt(sb.length() - 1) == ' ') {
1891             sb.delete(sb.length() - 2, sb.length());
1892         }
1893         sb.append(")");
1894     }
1895 
appendValidOutputFormatsForInputString(StringBuilder sb)1896     private void appendValidOutputFormatsForInputString(StringBuilder sb) {
1897         sb.append("ValidOutputFormatsForInput(");
1898         int[] inputFormats = getInputFormats();
1899         for (int inputFormat : inputFormats) {
1900             sb.append(String.format("[in:%s(%d), out:", formatToString(inputFormat), inputFormat));
1901             int[] outputFormats = getValidOutputFormatsForInput(inputFormat);
1902             for (int i = 0; i < outputFormats.length; i++) {
1903                 sb.append(String.format("%s(%d)", formatToString(outputFormats[i]),
1904                         outputFormats[i]));
1905                 if (i < outputFormats.length - 1) {
1906                     sb.append(", ");
1907                 }
1908             }
1909             sb.append("], ");
1910         }
1911         // Remove the pending ", "
1912         if (sb.charAt(sb.length() - 1) == ' ') {
1913             sb.delete(sb.length() - 2, sb.length());
1914         }
1915         sb.append(")");
1916     }
1917 
appendHighSpeedVideoConfigurationsString(StringBuilder sb)1918     private void appendHighSpeedVideoConfigurationsString(StringBuilder sb) {
1919         sb.append("HighSpeedVideoConfigurations(");
1920         Size[] sizes = getHighSpeedVideoSizes();
1921         for (Size size : sizes) {
1922             Range<Integer>[] ranges = getHighSpeedVideoFpsRangesFor(size);
1923             for (Range<Integer> range : ranges) {
1924                 sb.append(String.format("[w:%d, h:%d, min_fps:%d, max_fps:%d], ", size.getWidth(),
1925                         size.getHeight(), range.getLower(), range.getUpper()));
1926             }
1927         }
1928         // Remove the pending ", "
1929         if (sb.charAt(sb.length() - 1) == ' ') {
1930             sb.delete(sb.length() - 2, sb.length());
1931         }
1932         sb.append(")");
1933     }
1934 
1935     /**
1936      * @hide
1937      */
formatToString(int format)1938     public static String formatToString(int format) {
1939         switch (format) {
1940             case ImageFormat.YV12:
1941                 return "YV12";
1942             case ImageFormat.YUV_420_888:
1943                 return "YUV_420_888";
1944             case ImageFormat.NV21:
1945                 return "NV21";
1946             case ImageFormat.NV16:
1947                 return "NV16";
1948             case PixelFormat.RGB_565:
1949                 return "RGB_565";
1950             case PixelFormat.RGBA_8888:
1951                 return "RGBA_8888";
1952             case PixelFormat.RGBX_8888:
1953                 return "RGBX_8888";
1954             case PixelFormat.RGB_888:
1955                 return "RGB_888";
1956             case ImageFormat.JPEG:
1957                 return "JPEG";
1958             case ImageFormat.YUY2:
1959                 return "YUY2";
1960             case ImageFormat.Y8:
1961                 return "Y8";
1962             case ImageFormat.Y16:
1963                 return "Y16";
1964             case ImageFormat.RAW_SENSOR:
1965                 return "RAW_SENSOR";
1966             case ImageFormat.RAW_PRIVATE:
1967                 return "RAW_PRIVATE";
1968             case ImageFormat.RAW10:
1969                 return "RAW10";
1970             case ImageFormat.DEPTH16:
1971                 return "DEPTH16";
1972             case ImageFormat.DEPTH_POINT_CLOUD:
1973                 return "DEPTH_POINT_CLOUD";
1974             case ImageFormat.DEPTH_JPEG:
1975                 return "DEPTH_JPEG";
1976             case ImageFormat.RAW_DEPTH:
1977                 return "RAW_DEPTH";
1978             case ImageFormat.RAW_DEPTH10:
1979                 return "RAW_DEPTH10";
1980             case ImageFormat.PRIVATE:
1981                 return "PRIVATE";
1982             case ImageFormat.HEIC:
1983                 return "HEIC";
1984             case ImageFormat.JPEG_R:
1985                 return "JPEG/R";
1986             default:
1987                 return "UNKNOWN";
1988         }
1989     }
1990 
    // Internal (HAL) pixel format constants, mirrored from
    // system/core/include/system/graphics.h
    private static final int HAL_PIXEL_FORMAT_RAW16 = 0x20;
    /** @hide */
    public static final int HAL_PIXEL_FORMAT_BLOB = 0x21;
    private static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;
    private static final int HAL_PIXEL_FORMAT_YCbCr_420_888 = 0x23;
    private static final int HAL_PIXEL_FORMAT_RAW_OPAQUE = 0x24;
    private static final int HAL_PIXEL_FORMAT_RAW10 = 0x25;
    private static final int HAL_PIXEL_FORMAT_RAW12 = 0x26;
    private static final int HAL_PIXEL_FORMAT_Y16 = 0x20363159;


    // HAL dataspace values are bit-packed; the standard / transfer / range
    // sub-fields start at the bit positions below (see system/graphics.h).
    private static final int HAL_DATASPACE_STANDARD_SHIFT = 16;
    private static final int HAL_DATASPACE_TRANSFER_SHIFT = 22;
    private static final int HAL_DATASPACE_RANGE_SHIFT = 27;

    private static final int HAL_DATASPACE_UNKNOWN = 0x0;
    // Packed dataspace for legacy (V0) JFIF JPEG output; the numeric
    // sub-field values correspond to HAL dataspace enum members — see
    // system/graphics.h for their meaning.
    /** @hide */
    public static final int HAL_DATASPACE_V0_JFIF =
            (2 << HAL_DATASPACE_STANDARD_SHIFT) |
            (3 << HAL_DATASPACE_TRANSFER_SHIFT) |
            (1 << HAL_DATASPACE_RANGE_SHIFT);

    /**
     * @hide
     */
    public static final int HAL_DATASPACE_DEPTH = 0x1000;
    /**
     * @hide
     */
    public static final int HAL_DATASPACE_DYNAMIC_DEPTH = 0x1002;
    /**
     * @hide
     */
    public static final int HAL_DATASPACE_HEIF = 0x1004;
    /**
     * @hide
     */
    public static final int HAL_DATASPACE_JPEG_R = 0x1005;
    // 50,000,000 ns per frame == 20 fps.
    private static final long DURATION_20FPS_NS = 50000000L;
    /**
     * Selectors for the duration tables (min frame duration vs. stall).
     *
     * @see #getDurations(int, int)
     */
    private static final int DURATION_MIN_FRAME = 0;
    private static final int DURATION_STALL = 1;

    // Stream configuration and duration tables, grouped per dataspace.
    // Default (non-depth) dataspace:
    private final StreamConfiguration[] mConfigurations;
    private final StreamConfigurationDuration[] mMinFrameDurations;
    private final StreamConfigurationDuration[] mStallDurations;

    // HAL_DATASPACE_DEPTH:
    private final StreamConfiguration[] mDepthConfigurations;
    private final StreamConfigurationDuration[] mDepthMinFrameDurations;
    private final StreamConfigurationDuration[] mDepthStallDurations;

    // HAL_DATASPACE_DYNAMIC_DEPTH:
    private final StreamConfiguration[] mDynamicDepthConfigurations;
    private final StreamConfigurationDuration[] mDynamicDepthMinFrameDurations;
    private final StreamConfigurationDuration[] mDynamicDepthStallDurations;

    // HAL_DATASPACE_HEIF:
    private final StreamConfiguration[] mHeicConfigurations;
    private final StreamConfigurationDuration[] mHeicMinFrameDurations;
    private final StreamConfigurationDuration[] mHeicStallDurations;

    // HAL_DATASPACE_JPEG_R:
    private final StreamConfiguration[] mJpegRConfigurations;
    private final StreamConfigurationDuration[] mJpegRMinFrameDurations;
    private final StreamConfigurationDuration[] mJpegRStallDurations;

    private final HighSpeedVideoConfiguration[] mHighSpeedVideoConfigurations;
    private final ReprocessFormatsMap mInputOutputFormatsMap;

    // Whether slow high-resolution output sizes are listed separately.
    private final boolean mListHighResolution;

    /** internal format -> num output sizes mapping, not including slow high-res sizes, for
     * non-depth dataspaces */
    private final SparseIntArray mOutputFormats = new SparseIntArray();
    /** internal format -> num output sizes mapping for slow high-res sizes, for non-depth
     * dataspaces */
    private final SparseIntArray mHighResOutputFormats = new SparseIntArray();
    /** internal format -> num output sizes mapping for all non-depth dataspaces */
    private final SparseIntArray mAllOutputFormats = new SparseIntArray();
    /** internal format -> num input sizes mapping, for input reprocessing formats */
    private final SparseIntArray mInputFormats = new SparseIntArray();
    /** internal format -> num depth output sizes mapping, for HAL_DATASPACE_DEPTH */
    private final SparseIntArray mDepthOutputFormats = new SparseIntArray();
    /** internal format -> num dynamic depth output sizes mapping, for HAL_DATASPACE_DYNAMIC_DEPTH */
    private final SparseIntArray mDynamicDepthOutputFormats = new SparseIntArray();
    /** internal format -> num heic output sizes mapping, for HAL_DATASPACE_HEIF */
    private final SparseIntArray mHeicOutputFormats = new SparseIntArray();
    /** internal format -> num Jpeg/R output sizes mapping, for HAL_DATASPACE_JPEG_R */
    private final SparseIntArray mJpegROutputFormats = new SparseIntArray();

    /** High speed video Size -> FPS range count mapping. */
    private final HashMap</*HighSpeedVideoSize*/Size, /*Count*/Integer> mHighSpeedVideoSizeMap =
            new HashMap<Size, Integer>();
    /** High speed video FPS range -> Size count mapping. */
    private final HashMap</*HighSpeedVideoFpsRange*/Range<Integer>, /*Count*/Integer>
            mHighSpeedVideoFpsRangeMap = new HashMap<Range<Integer>, Integer>();
2088 }
2089