• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.params;
18 
19 import static com.android.internal.util.Preconditions.checkArrayElementsNotNull;
20 
21 import android.graphics.ImageFormat;
22 import android.graphics.PixelFormat;
23 import android.hardware.camera2.CameraCharacteristics;
24 import android.hardware.camera2.CameraDevice;
25 import android.hardware.camera2.CameraMetadata;
26 import android.hardware.camera2.CaptureRequest;
27 import android.hardware.camera2.utils.HashCodeHelpers;
28 import android.hardware.camera2.utils.SurfaceUtils;
29 import android.util.Range;
30 import android.util.Size;
31 import android.util.SparseIntArray;
32 import android.view.Surface;
33 
34 import java.util.Arrays;
35 import java.util.HashMap;
36 import java.util.Objects;
37 import java.util.Set;
38 
39 /**
40  * Immutable class to store the available stream
41  * {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP configurations} to set up
42  * {@link android.view.Surface Surfaces} for creating a
43  * {@link android.hardware.camera2.CameraCaptureSession capture session} with
44  * {@link android.hardware.camera2.CameraDevice#createCaptureSession(SessionConfiguration)}.
45  * <!-- TODO: link to input stream configuration -->
46  *
47  * <p>This is the authoritative list for all <!-- input/ -->output formats (and sizes respectively
48  * for that format) that are supported by a camera device.</p>
49  *
50  * <p>This also contains the minimum frame durations and stall durations for each format/size
51  * combination that can be used to calculate effective frame rate when submitting multiple captures.
52  * </p>
53  *
54  * <p>An instance of this object is available from {@link CameraCharacteristics} using
55  * the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP} key and the
56  * {@link CameraCharacteristics#get} method.</p>
57  *
58  * <pre><code>{@code
59  * CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
60  * StreamConfigurationMap configs = characteristics.get(
61  *         CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
62  * }</code></pre>
63  *
64  * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
65  * @see CameraDevice#createCaptureSession(SessionConfiguration)
66  */
67 public final class StreamConfigurationMap {
68 
69     private static final String TAG = "StreamConfigurationMap";
70 
71     private static final int MAX_DIMEN_FOR_ROUNDING = 1920; // maximum allowed width for rounding
72 
73     /**
74      * Create a new {@link StreamConfigurationMap}.
75      *
76      * <p>The array parameters ownership is passed to this object after creation; do not
77      * write to them after this constructor is invoked.</p>
78      *
79      * @param configurations a non-{@code null} array of {@link StreamConfiguration}
80      * @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration}
81      * @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
82      * @param depthConfigurations a non-{@code null} array of depth {@link StreamConfiguration}
83      * @param depthMinFrameDurations a non-{@code null} array of depth
84      *        {@link StreamConfigurationDuration}
85      * @param depthStallDurations a non-{@code null} array of depth
86      *        {@link StreamConfigurationDuration}
87      * @param dynamicDepthConfigurations a non-{@code null} array of dynamic depth
88      *        {@link StreamConfiguration}
89      * @param dynamicDepthMinFrameDurations a non-{@code null} array of dynamic depth
90      *        {@link StreamConfigurationDuration}
91      * @param dynamicDepthStallDurations a non-{@code null} array of dynamic depth
92      *        {@link StreamConfigurationDuration}
93      * @param heicConfigurations a non-{@code null} array of heic {@link StreamConfiguration}
94      * @param heicMinFrameDurations a non-{@code null} array of heic
95      *        {@link StreamConfigurationDuration}
96      * @param heicStallDurations a non-{@code null} array of heic
97      *        {@link StreamConfigurationDuration}
98      * @param jpegRConfigurations a non-{@code null} array of Jpeg/R {@link StreamConfiguration}
99      * @param jpegRMinFrameDurations a non-{@code null} array of Jpeg/R
100      *        {@link StreamConfigurationDuration}
101      * @param jpegRStallDurations a non-{@code null} array of Jpeg/R
102      *        {@link StreamConfigurationDuration}
103      * @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration}, null if
104      *        camera device does not support high speed video recording
105      * @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE
106      *        and thus needs a separate list of slow high-resolution output sizes
107      * @throws NullPointerException if any of the arguments except highSpeedVideoConfigurations
108      *         were {@code null} or any subelements were {@code null}
109      *
110      * @hide
111      */
StreamConfigurationMap( StreamConfiguration[] configurations, StreamConfigurationDuration[] minFrameDurations, StreamConfigurationDuration[] stallDurations, StreamConfiguration[] depthConfigurations, StreamConfigurationDuration[] depthMinFrameDurations, StreamConfigurationDuration[] depthStallDurations, StreamConfiguration[] dynamicDepthConfigurations, StreamConfigurationDuration[] dynamicDepthMinFrameDurations, StreamConfigurationDuration[] dynamicDepthStallDurations, StreamConfiguration[] heicConfigurations, StreamConfigurationDuration[] heicMinFrameDurations, StreamConfigurationDuration[] heicStallDurations, StreamConfiguration[] jpegRConfigurations, StreamConfigurationDuration[] jpegRMinFrameDurations, StreamConfigurationDuration[] jpegRStallDurations, HighSpeedVideoConfiguration[] highSpeedVideoConfigurations, ReprocessFormatsMap inputOutputFormatsMap, boolean listHighResolution)112     public StreamConfigurationMap(
113             StreamConfiguration[] configurations,
114             StreamConfigurationDuration[] minFrameDurations,
115             StreamConfigurationDuration[] stallDurations,
116             StreamConfiguration[] depthConfigurations,
117             StreamConfigurationDuration[] depthMinFrameDurations,
118             StreamConfigurationDuration[] depthStallDurations,
119             StreamConfiguration[] dynamicDepthConfigurations,
120             StreamConfigurationDuration[] dynamicDepthMinFrameDurations,
121             StreamConfigurationDuration[] dynamicDepthStallDurations,
122             StreamConfiguration[] heicConfigurations,
123             StreamConfigurationDuration[] heicMinFrameDurations,
124             StreamConfigurationDuration[] heicStallDurations,
125             StreamConfiguration[] jpegRConfigurations,
126             StreamConfigurationDuration[] jpegRMinFrameDurations,
127             StreamConfigurationDuration[] jpegRStallDurations,
128             HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
129             ReprocessFormatsMap inputOutputFormatsMap,
130             boolean listHighResolution) {
131         this(configurations, minFrameDurations, stallDurations,
132                     depthConfigurations, depthMinFrameDurations, depthStallDurations,
133                     dynamicDepthConfigurations, dynamicDepthMinFrameDurations,
134                     dynamicDepthStallDurations,
135                     heicConfigurations, heicMinFrameDurations, heicStallDurations,
136                     jpegRConfigurations, jpegRMinFrameDurations, jpegRStallDurations,
137                     highSpeedVideoConfigurations, inputOutputFormatsMap, listHighResolution,
138                     /*enforceImplementationDefined*/ true);
139     }
140 
141     /**
142      * Create a new {@link StreamConfigurationMap}.
143      *
144      * <p>The array parameters ownership is passed to this object after creation; do not
145      * write to them after this constructor is invoked.</p>
146      *
147      * @param configurations a non-{@code null} array of {@link StreamConfiguration}
148      * @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration}
149      * @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
150      * @param depthConfigurations a non-{@code null} array of depth {@link StreamConfiguration}
151      * @param depthMinFrameDurations a non-{@code null} array of depth
152      *        {@link StreamConfigurationDuration}
153      * @param depthStallDurations a non-{@code null} array of depth
154      *        {@link StreamConfigurationDuration}
155      * @param dynamicDepthConfigurations a non-{@code null} array of dynamic depth
156      *        {@link StreamConfiguration}
157      * @param dynamicDepthMinFrameDurations a non-{@code null} array of dynamic depth
158      *        {@link StreamConfigurationDuration}
159      * @param dynamicDepthStallDurations a non-{@code null} array of dynamic depth
160      *        {@link StreamConfigurationDuration}
161      * @param heicConfigurations a non-{@code null} array of heic {@link StreamConfiguration}
162      * @param heicMinFrameDurations a non-{@code null} array of heic
163      *        {@link StreamConfigurationDuration}
164      * @param heicStallDurations a non-{@code null} array of heic
165      *        {@link StreamConfigurationDuration}
166      * @param jpegRConfigurations a non-{@code null} array of Jpeg/R {@link StreamConfiguration}
167      * @param jpegRMinFrameDurations a non-{@code null} array of Jpeg/R
168      *        {@link StreamConfigurationDuration}
169      * @param jpegRStallDurations a non-{@code null} array of Jpeg/R
170      *        {@link StreamConfigurationDuration}
171      * @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration}, null if
172      *        camera device does not support high speed video recording
173      * @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE
174      *        and thus needs a separate list of slow high-resolution output sizes
175      * @param enforceImplementationDefined a flag indicating whether
176      *        IMPLEMENTATION_DEFINED format configuration must be present
177      * @throws NullPointerException if any of the arguments except highSpeedVideoConfigurations
178      *         were {@code null} or any subelements were {@code null}
179      *
180      * @hide
181      */
StreamConfigurationMap( StreamConfiguration[] configurations, StreamConfigurationDuration[] minFrameDurations, StreamConfigurationDuration[] stallDurations, StreamConfiguration[] depthConfigurations, StreamConfigurationDuration[] depthMinFrameDurations, StreamConfigurationDuration[] depthStallDurations, StreamConfiguration[] dynamicDepthConfigurations, StreamConfigurationDuration[] dynamicDepthMinFrameDurations, StreamConfigurationDuration[] dynamicDepthStallDurations, StreamConfiguration[] heicConfigurations, StreamConfigurationDuration[] heicMinFrameDurations, StreamConfigurationDuration[] heicStallDurations, StreamConfiguration[] jpegRConfigurations, StreamConfigurationDuration[] jpegRMinFrameDurations, StreamConfigurationDuration[] jpegRStallDurations, HighSpeedVideoConfiguration[] highSpeedVideoConfigurations, ReprocessFormatsMap inputOutputFormatsMap, boolean listHighResolution, boolean enforceImplementationDefined)182     public StreamConfigurationMap(
183             StreamConfiguration[] configurations,
184             StreamConfigurationDuration[] minFrameDurations,
185             StreamConfigurationDuration[] stallDurations,
186             StreamConfiguration[] depthConfigurations,
187             StreamConfigurationDuration[] depthMinFrameDurations,
188             StreamConfigurationDuration[] depthStallDurations,
189             StreamConfiguration[] dynamicDepthConfigurations,
190             StreamConfigurationDuration[] dynamicDepthMinFrameDurations,
191             StreamConfigurationDuration[] dynamicDepthStallDurations,
192             StreamConfiguration[] heicConfigurations,
193             StreamConfigurationDuration[] heicMinFrameDurations,
194             StreamConfigurationDuration[] heicStallDurations,
195             StreamConfiguration[] jpegRConfigurations,
196             StreamConfigurationDuration[] jpegRMinFrameDurations,
197             StreamConfigurationDuration[] jpegRStallDurations,
198             HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
199             ReprocessFormatsMap inputOutputFormatsMap,
200             boolean listHighResolution,
201             boolean enforceImplementationDefined) {
202 
203         if (configurations == null &&
204                 depthConfigurations == null &&
205                 heicConfigurations == null) {
206             throw new NullPointerException("At least one of color/depth/heic configurations " +
207                     "must not be null");
208         }
209 
210         if (configurations == null) {
211             // If no color configurations exist, ensure depth ones do
212             mConfigurations = new StreamConfiguration[0];
213             mMinFrameDurations = new StreamConfigurationDuration[0];
214             mStallDurations = new StreamConfigurationDuration[0];
215         } else {
216             mConfigurations = checkArrayElementsNotNull(configurations, "configurations");
217             mMinFrameDurations = checkArrayElementsNotNull(minFrameDurations, "minFrameDurations");
218             mStallDurations = checkArrayElementsNotNull(stallDurations, "stallDurations");
219         }
220 
221         mListHighResolution = listHighResolution;
222 
223         if (depthConfigurations == null) {
224             mDepthConfigurations = new StreamConfiguration[0];
225             mDepthMinFrameDurations = new StreamConfigurationDuration[0];
226             mDepthStallDurations = new StreamConfigurationDuration[0];
227         } else {
228             mDepthConfigurations = checkArrayElementsNotNull(depthConfigurations,
229                     "depthConfigurations");
230             mDepthMinFrameDurations = checkArrayElementsNotNull(depthMinFrameDurations,
231                     "depthMinFrameDurations");
232             mDepthStallDurations = checkArrayElementsNotNull(depthStallDurations,
233                     "depthStallDurations");
234         }
235 
236         if (dynamicDepthConfigurations == null) {
237             mDynamicDepthConfigurations = new StreamConfiguration[0];
238             mDynamicDepthMinFrameDurations = new StreamConfigurationDuration[0];
239             mDynamicDepthStallDurations = new StreamConfigurationDuration[0];
240         } else {
241             mDynamicDepthConfigurations = checkArrayElementsNotNull(dynamicDepthConfigurations,
242                     "dynamicDepthConfigurations");
243             mDynamicDepthMinFrameDurations = checkArrayElementsNotNull(
244                     dynamicDepthMinFrameDurations, "dynamicDepthMinFrameDurations");
245             mDynamicDepthStallDurations = checkArrayElementsNotNull(dynamicDepthStallDurations,
246                     "dynamicDepthStallDurations");
247         }
248 
249         if (heicConfigurations == null) {
250             mHeicConfigurations = new StreamConfiguration[0];
251             mHeicMinFrameDurations = new StreamConfigurationDuration[0];
252             mHeicStallDurations = new StreamConfigurationDuration[0];
253         } else {
254             mHeicConfigurations = checkArrayElementsNotNull(heicConfigurations,
255                     "heicConfigurations");
256             mHeicMinFrameDurations = checkArrayElementsNotNull(heicMinFrameDurations,
257                     "heicMinFrameDurations");
258             mHeicStallDurations = checkArrayElementsNotNull(heicStallDurations,
259                     "heicStallDurations");
260         }
261 
262 
263         if (jpegRConfigurations == null) {
264             mJpegRConfigurations = new StreamConfiguration[0];
265             mJpegRMinFrameDurations = new StreamConfigurationDuration[0];
266             mJpegRStallDurations = new StreamConfigurationDuration[0];
267         } else {
268             mJpegRConfigurations = checkArrayElementsNotNull(jpegRConfigurations,
269                     "jpegRConfigurations");
270             mJpegRMinFrameDurations = checkArrayElementsNotNull(jpegRMinFrameDurations,
271                     "jpegRFrameDurations");
272             mJpegRStallDurations = checkArrayElementsNotNull(jpegRStallDurations,
273                     "jpegRStallDurations");
274         }
275 
276         if (highSpeedVideoConfigurations == null) {
277             mHighSpeedVideoConfigurations = new HighSpeedVideoConfiguration[0];
278         } else {
279             mHighSpeedVideoConfigurations = checkArrayElementsNotNull(
280                     highSpeedVideoConfigurations, "highSpeedVideoConfigurations");
281         }
282 
283         // For each format, track how many sizes there are available to configure
284         for (StreamConfiguration config : mConfigurations) {
285             int fmt = config.getFormat();
286             SparseIntArray map = null;
287             if (config.isOutput()) {
288                 mAllOutputFormats.put(fmt, mAllOutputFormats.get(fmt) + 1);
289                 long duration = 0;
290                 if (mListHighResolution) {
291                     for (StreamConfigurationDuration configurationDuration : mMinFrameDurations) {
292                         if (configurationDuration.getFormat() == fmt &&
293                                 configurationDuration.getWidth() == config.getSize().getWidth() &&
294                                 configurationDuration.getHeight() == config.getSize().getHeight()) {
295                             duration = configurationDuration.getDuration();
296                             break;
297                         }
298                     }
299                 }
300                 map = duration <= DURATION_20FPS_NS ?
301                         mOutputFormats : mHighResOutputFormats;
302             } else {
303                 map = mInputFormats;
304             }
305             map.put(fmt, map.get(fmt) + 1);
306         }
307 
308         // For each depth format, track how many sizes there are available to configure
309         for (StreamConfiguration config : mDepthConfigurations) {
310             if (!config.isOutput()) {
311                 // Ignoring input depth configs
312                 continue;
313             }
314 
315             mDepthOutputFormats.put(config.getFormat(),
316                     mDepthOutputFormats.get(config.getFormat()) + 1);
317         }
318         for (StreamConfiguration config : mDynamicDepthConfigurations) {
319             if (!config.isOutput()) {
320                 // Ignoring input configs
321                 continue;
322             }
323 
324             mDynamicDepthOutputFormats.put(config.getFormat(),
325                     mDynamicDepthOutputFormats.get(config.getFormat()) + 1);
326         }
327 
328         // For each heic format, track how many sizes there are available to configure
329         for (StreamConfiguration config : mHeicConfigurations) {
330             if (!config.isOutput()) {
331                 // Ignoring input depth configs
332                 continue;
333             }
334 
335             mHeicOutputFormats.put(config.getFormat(),
336                     mHeicOutputFormats.get(config.getFormat()) + 1);
337         }
338 
339         // For each Jpeg/R format, track how many sizes there are available to configure
340         for (StreamConfiguration config : mJpegRConfigurations) {
341             if (!config.isOutput()) {
342                 // Ignoring input Jpeg/R configs
343                 continue;
344             }
345 
346             mJpegROutputFormats.put(config.getFormat(),
347                     mJpegROutputFormats.get(config.getFormat()) + 1);
348         }
349 
350         if (configurations != null && enforceImplementationDefined &&
351                 mOutputFormats.indexOfKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) < 0) {
352             throw new AssertionError(
353                     "At least one stream configuration for IMPLEMENTATION_DEFINED must exist");
354         }
355 
356         // For each Size/FPS range, track how many FPS range/Size there are available
357         for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
358             Size size = config.getSize();
359             Range<Integer> fpsRange = config.getFpsRange();
360             Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
361             if (fpsRangeCount == null) {
362                 fpsRangeCount = 0;
363             }
364             mHighSpeedVideoSizeMap.put(size, fpsRangeCount + 1);
365             Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
366             if (sizeCount == null) {
367                 sizeCount = 0;
368             }
369             mHighSpeedVideoFpsRangeMap.put(fpsRange, sizeCount + 1);
370         }
371 
372         mInputOutputFormatsMap = inputOutputFormatsMap;
373     }
374 
375     /**
376      * Get the image {@code format} output formats in this stream configuration.
377      *
378      * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
379      * or in {@link PixelFormat} (and there is no possibility of collision).</p>
380      *
381      * <p>Formats listed in this array are guaranteed to return true if queried with
382      * {@link #isOutputSupportedFor(int)}.</p>
383      *
384      * @return an array of integer format
385      *
386      * @see ImageFormat
387      * @see PixelFormat
388      */
getOutputFormats()389     public int[] getOutputFormats() {
390         return getPublicFormats(/*output*/true);
391     }
392 
393     /**
394      * Get the image {@code format} output formats for a reprocessing input format.
395      *
396      * <p>When submitting a {@link CaptureRequest} with an input Surface of a given format,
397      * the only allowed target outputs of the {@link CaptureRequest} are the ones with a format
398      * listed in the return value of this method. Including any other output Surface as a target
399      * will throw an IllegalArgumentException. If no output format is supported given the input
400      * format, an empty int[] will be returned.</p>
401      *
402      * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
403      * or in {@link PixelFormat} (and there is no possibility of collision).</p>
404      *
405      * <p>Formats listed in this array are guaranteed to return true if queried with
406      * {@link #isOutputSupportedFor(int)}.</p>
407      *
408      * @return an array of integer format
409      *
410      * @see ImageFormat
411      * @see PixelFormat
412      */
getValidOutputFormatsForInput(int inputFormat)413     public int[] getValidOutputFormatsForInput(int inputFormat) {
414         if (mInputOutputFormatsMap == null) {
415             return new int[0];
416         }
417 
418         int[] outputs = mInputOutputFormatsMap.getOutputs(inputFormat);
419         if (mHeicOutputFormats.size() > 0) {
420             // All reprocessing formats map contain JPEG.
421             int[] outputsWithHeic = Arrays.copyOf(outputs, outputs.length+1);
422             outputsWithHeic[outputs.length] = ImageFormat.HEIC;
423             return outputsWithHeic;
424         } else {
425             return outputs;
426         }
427     }
428 
429     /**
430      * Get the image {@code format} input formats in this stream configuration.
431      *
432      * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
433      * or in {@link PixelFormat} (and there is no possibility of collision).</p>
434      *
435      * @return an array of integer format
436      *
437      * @see ImageFormat
438      * @see PixelFormat
439      */
getInputFormats()440     public int[] getInputFormats() {
441         return getPublicFormats(/*output*/false);
442     }
443 
444     /**
445      * Get the supported input sizes for this input format.
446      *
447      * <p>The format must have come from {@link #getInputFormats}; otherwise
448      * {@code null} is returned.</p>
449      *
450      * @param format a format from {@link #getInputFormats}
451      * @return a non-empty array of sizes, or {@code null} if the format was not available.
452      */
getInputSizes(final int format)453     public Size[] getInputSizes(final int format) {
454         return getPublicFormatSizes(format, /*output*/false, /*highRes*/false);
455     }
456 
457     /**
458      * Determine whether or not output surfaces with a particular user-defined format can be passed
459      * {@link CameraDevice#createCaptureSession(SessionConfiguration) createCaptureSession}.
460      *
461      * <p>This method determines that the output {@code format} is supported by the camera device;
462      * each output {@code surface} target may or may not itself support that {@code format}.
463      * Refer to the class which provides the surface for additional documentation.</p>
464      *
465      * <p>Formats for which this returns {@code true} are guaranteed to exist in the result
466      * returned by {@link #getOutputSizes}.</p>
467      *
468      * @param format an image format from either {@link ImageFormat} or {@link PixelFormat}
469      * @return
470      *          {@code true} iff using a {@code surface} with this {@code format} will be
471      *          supported with {@link CameraDevice#createCaptureSession(SessionConfiguration)}
472      *
473      * @throws IllegalArgumentException
474      *          if the image format was not a defined named constant
475      *          from either {@link ImageFormat} or {@link PixelFormat}
476      *
477      * @see ImageFormat
478      * @see PixelFormat
479      * @see CameraDevice#createCaptureSession(SessionConfiguration)
480      */
isOutputSupportedFor(int format)481     public boolean isOutputSupportedFor(int format) {
482         checkArgumentFormat(format);
483 
484         int internalFormat = imageFormatToInternal(format);
485         int dataspace = imageFormatToDataspace(format);
486         if (dataspace == HAL_DATASPACE_DEPTH) {
487             return mDepthOutputFormats.indexOfKey(internalFormat) >= 0;
488         } else if (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) {
489             return mDynamicDepthOutputFormats.indexOfKey(internalFormat) >= 0;
490         } else if (dataspace == HAL_DATASPACE_HEIF) {
491             return mHeicOutputFormats.indexOfKey(internalFormat) >= 0;
492         } else if (dataspace == HAL_DATASPACE_JPEG_R) {
493             return mJpegROutputFormats.indexOfKey(internalFormat) >= 0;
494         } else {
495             return getFormatsMap(/*output*/true).indexOfKey(internalFormat) >= 0;
496         }
497     }
498 
499     /**
500      * Determine whether or not output streams can be configured with a particular class
501      * as a consumer.
502      *
503      * <p>The following list is generally usable for outputs:
504      * <ul>
505      * <li>{@link android.media.ImageReader} -
506      * Recommended for image processing or streaming to external resources (such as a file or
507      * network)
508      * <li>{@link android.media.MediaRecorder} -
509      * Recommended for recording video (simple to use)
510      * <li>{@link android.media.MediaCodec} -
511      * Recommended for recording video (more complicated to use, with more flexibility)
512      * <li>{@link android.view.SurfaceHolder} -
513      * Recommended for low-power camera preview with {@link android.view.SurfaceView}
514      * <li>{@link android.graphics.SurfaceTexture} -
515      * Recommended for OpenGL-accelerated preview processing or compositing with
516      * {@link android.view.TextureView}
517      * </ul>
518      * </p>
519      *
520      * <p>Generally speaking this means that creating a {@link Surface} from that class <i>may</i>
521      * provide a producer endpoint that is suitable to be used with
522      * {@link CameraDevice#createCaptureSession(SessionConfiguration)}.</p>
523      *
524      * <p>Since not all of the above classes support output of all format and size combinations,
525      * the particular combination should be queried with {@link #isOutputSupportedFor(Surface)}.</p>
526      *
527      * @param klass a non-{@code null} {@link Class} object reference
528      * @return {@code true} if this class is supported as an output, {@code false} otherwise
529      *
530      * @throws NullPointerException if {@code klass} was {@code null}
531      *
532      * @see CameraDevice#createCaptureSession(SessionConfiguration)
533      * @see #isOutputSupportedFor(Surface)
534      */
isOutputSupportedFor(Class<T> klass)535     public static <T> boolean isOutputSupportedFor(Class<T> klass) {
536         Objects.requireNonNull(klass, "klass must not be null");
537 
538         if (klass == android.media.ImageReader.class) {
539             return true;
540         } else if (klass == android.media.MediaRecorder.class) {
541             return true;
542         } else if (klass == android.media.MediaCodec.class) {
543             return true;
544         } else if (klass == android.renderscript.Allocation.class) {
545             return true;
546         } else if (klass == android.view.SurfaceHolder.class) {
547             return true;
548         } else if (klass == android.graphics.SurfaceTexture.class) {
549             return true;
550         }
551 
552         return false;
553     }
554 
555     /**
556      * Determine whether or not the {@code surface} in its current
557      * state is suitable to be included in a {@link
558      * CameraDevice#createCaptureSession(SessionConfiguration) capture
559      * session} as an output.
560      *
561      * <p>Not all surfaces are usable with the {@link CameraDevice}, and not all configurations
562      * of that {@code surface} are compatible. Some classes that provide the {@code surface} are
563      * compatible with the {@link CameraDevice} in general
564      * (see {@link #isOutputSupportedFor(Class)}, but it is the caller's responsibility to put the
565      * {@code surface} into a state that will be compatible with the {@link CameraDevice}.</p>
566      *
567      * <p>Reasons for a {@code surface} being specifically incompatible might be:
568      * <ul>
569      * <li>Using a format that's not listed by {@link #getOutputFormats}
570      * <li>Using a format/size combination that's not listed by {@link #getOutputSizes}
571      * <li>The {@code surface} itself is not in a state where it can service a new producer.</p>
572      * </li>
573      * </ul>
574      *
575      * <p>Surfaces from flexible sources will return true even if the exact size of the Surface does
576      * not match a camera-supported size, as long as the format (or class) is supported and the
     * camera device supports a size that is equal to or less than 1080p in that format. If such a
578      * Surface is used to create a capture session, it will have its size rounded to the nearest
579      * supported size, below or equal to 1080p. Flexible sources include SurfaceView, SurfaceTexture,
580      * and ImageReader.</p>
581      *
582      * <p>This is not an exhaustive list; see the particular class's documentation for further
583      * possible reasons of incompatibility.</p>
584      *
585      * @param surface a non-{@code null} {@link Surface} object reference
586      * @return {@code true} if this is supported, {@code false} otherwise
587      *
588      * @throws NullPointerException if {@code surface} was {@code null}
589      * @throws IllegalArgumentException if the Surface endpoint is no longer valid
590      *
591      * @see CameraDevice#createCaptureSession(SessionConfiguration)
592      * @see #isOutputSupportedFor(Class)
593      */
isOutputSupportedFor(Surface surface)594     public boolean isOutputSupportedFor(Surface surface) {
595         Objects.requireNonNull(surface, "surface must not be null");
596 
597         Size surfaceSize = SurfaceUtils.getSurfaceSize(surface);
598         int surfaceFormat = SurfaceUtils.getSurfaceFormat(surface);
599         int surfaceDataspace = SurfaceUtils.getSurfaceDataspace(surface);
600 
601         // See if consumer is flexible.
602         boolean isFlexible = SurfaceUtils.isFlexibleConsumer(surface);
603 
604         StreamConfiguration[] configs =
605                 surfaceDataspace == HAL_DATASPACE_DEPTH ? mDepthConfigurations :
606                 surfaceDataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthConfigurations :
607                 surfaceDataspace == HAL_DATASPACE_HEIF ? mHeicConfigurations :
608                 surfaceDataspace == HAL_DATASPACE_JPEG_R ? mJpegRConfigurations :
609                 mConfigurations;
610         for (StreamConfiguration config : configs) {
611             if (config.getFormat() == surfaceFormat && config.isOutput()) {
612                 // Matching format, either need exact size match, or a flexible consumer
613                 // and a size no bigger than MAX_DIMEN_FOR_ROUNDING
614                 if (config.getSize().equals(surfaceSize)) {
615                     return true;
616                 } else if (isFlexible &&
617                         (config.getSize().getWidth() <= MAX_DIMEN_FOR_ROUNDING)) {
618                     return true;
619                 }
620             }
621         }
622         return false;
623     }
624 
625     /**
626      * Determine whether or not the particular stream configuration is
627      * suitable to be included in a {@link
628      * CameraDevice#createCaptureSession(SessionConfiguration) capture
629      * session} as an output.
630      *
631      * @param size stream configuration size
632      * @param format stream configuration format
633      * @return {@code true} if this is supported, {@code false} otherwise
634      *
635      * @see CameraDevice#createCaptureSession(SessionConfiguration)
636      * @see #isOutputSupportedFor(Class)
637      * @hide
638      */
isOutputSupportedFor(Size size, int format)639     public boolean isOutputSupportedFor(Size size, int format) {
640         int internalFormat = imageFormatToInternal(format);
641         int dataspace = imageFormatToDataspace(format);
642 
643         StreamConfiguration[] configs =
644                 dataspace == HAL_DATASPACE_DEPTH ? mDepthConfigurations :
645                 dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthConfigurations :
646                 dataspace == HAL_DATASPACE_HEIF ? mHeicConfigurations :
647                 dataspace == HAL_DATASPACE_JPEG_R ? mJpegRConfigurations :
648                 mConfigurations;
649         for (StreamConfiguration config : configs) {
650             if ((config.getFormat() == internalFormat) && config.isOutput() &&
651                     config.getSize().equals(size)) {
652                 return true;
653             }
654         }
655 
656         return false;
657     }
658 
659     /**
660      * Get a list of sizes compatible with {@code klass} to use as an output.
661      *
662      * <p>Some of the supported classes may support additional formats beyond
663      * {@link ImageFormat#PRIVATE}; this function only returns
664      * sizes for {@link ImageFormat#PRIVATE}. For example, {@link android.media.ImageReader}
665      * supports {@link ImageFormat#YUV_420_888} and {@link ImageFormat#PRIVATE}, this method will
666      * only return the sizes for {@link ImageFormat#PRIVATE} for {@link android.media.ImageReader}
667      * class.</p>
668      *
669      * <p>If a well-defined format such as {@code NV21} is required, use
670      * {@link #getOutputSizes(int)} instead.</p>
671      *
     * <p>The {@code klass} should be a supported output; that is, querying
     * {@link #isOutputSupportedFor(Class)} should return {@code true}.</p>
674      *
675      * @param klass
676      *          a non-{@code null} {@link Class} object reference
677      * @return
678      *          an array of supported sizes for {@link ImageFormat#PRIVATE} format,
679      *          or {@code null} iff the {@code klass} is not a supported output.
680      *
681      *
682      * @throws NullPointerException if {@code klass} was {@code null}
683      *
684      * @see #isOutputSupportedFor(Class)
685      */
getOutputSizes(Class<T> klass)686     public <T> Size[] getOutputSizes(Class<T> klass) {
687         if (isOutputSupportedFor(klass) == false) {
688             return null;
689         }
690 
691         return getInternalFormatSizes(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
692                 HAL_DATASPACE_UNKNOWN,/*output*/true, /*highRes*/false);
693     }
694 
695     /**
696      * Get a list of sizes compatible with the requested image {@code format}.
697      *
698      * <p>The {@code format} should be a supported format (one of the formats returned by
699      * {@link #getOutputFormats}).</p>
700      *
701      * As of API level 23, the {@link #getHighResolutionOutputSizes} method can be used on devices
702      * that support the
703      * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}
704      * capability to get a list of high-resolution output sizes that cannot operate at the preferred
705      * 20fps rate. This means that for some supported formats, this method will return an empty
706      * list, if all the supported resolutions operate at below 20fps.  For devices that do not
707      * support the BURST_CAPTURE capability, all output resolutions are listed through this method.
708      *
709      * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
710      * @return
711      *          an array of supported sizes,
712      *          or {@code null} if the {@code format} is not a supported output
713      *
714      * @see ImageFormat
715      * @see PixelFormat
716      * @see #getOutputFormats
717      */
getOutputSizes(int format)718     public Size[] getOutputSizes(int format) {
719         return getPublicFormatSizes(format, /*output*/true, /*highRes*/ false);
720     }
721 
722     /**
723      * Get a list of supported high speed video recording sizes.
724      * <p>
725      * When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is
726      * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will
727      * list the supported high speed video size configurations. All the sizes listed will be a
728      * subset of the sizes reported by {@link #getOutputSizes} for processed non-stalling formats
729      * (typically {@link ImageFormat#PRIVATE} {@link ImageFormat#YUV_420_888}, etc.)
730      * </p>
731      * <p>
     * To enable high speed video recording, the application must create a constrained high speed
733      * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit
734      * a CaptureRequest list created by
735      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
736      * to this session. The application must select the video size from this method and
737      * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
738      * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and
739      * generate the high speed request list. For example, if the application intends to do high
740      * speed recording, it can select the maximum size reported by this method to create high speed
741      * capture session. Note that for the use case of multiple output streams, application must
742      * select one unique size from this method to use (e.g., preview and recording streams must have
743      * the same size). Otherwise, the high speed session creation will fail. Once the size is
744      * selected, application can get the supported FPS ranges by
745      * {@link #getHighSpeedVideoFpsRangesFor}, and use these FPS ranges to setup the recording
746      * request lists via
747      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
748      * </p>
749      *
750      * @return an array of supported high speed video recording sizes
751      * @see #getHighSpeedVideoFpsRangesFor(Size)
752      * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
753      * @see CameraDevice#createConstrainedHighSpeedCaptureSession
754      * @see android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList
755      */
getHighSpeedVideoSizes()756     public Size[] getHighSpeedVideoSizes() {
757         Set<Size> keySet = mHighSpeedVideoSizeMap.keySet();
758         return keySet.toArray(new Size[keySet.size()]);
759     }
760 
761     /**
762      * Get the frame per second ranges (fpsMin, fpsMax) for input high speed video size.
763      * <p>
764      * See {@link #getHighSpeedVideoFpsRanges} for how to enable high speed recording.
765      * </p>
766      * <p>
767      * The {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS ranges} reported in this method
768      * must not be used to setup capture requests that are submitted to unconstrained capture
769      * sessions, or it will result in {@link IllegalArgumentException IllegalArgumentExceptions}.
770      * </p>
771      * <p>
772      * See {@link #getHighSpeedVideoFpsRanges} for the characteristics of the returned FPS ranges.
773      * </p>
774      *
775      * @param size one of the sizes returned by {@link #getHighSpeedVideoSizes()}
776      * @return an array of supported high speed video recording FPS ranges The upper bound of
777      *         returned ranges is guaranteed to be greater than or equal to 120.
778      * @throws IllegalArgumentException if input size does not exist in the return value of
779      *             getHighSpeedVideoSizes
780      * @see #getHighSpeedVideoSizes()
781      * @see #getHighSpeedVideoFpsRanges()
782      */
getHighSpeedVideoFpsRangesFor(Size size)783     public Range<Integer>[] getHighSpeedVideoFpsRangesFor(Size size) {
784         Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
785         if (fpsRangeCount == null || fpsRangeCount == 0) {
786             throw new IllegalArgumentException(String.format(
787                     "Size %s does not support high speed video recording", size));
788         }
789 
790         @SuppressWarnings("unchecked")
791         Range<Integer>[] fpsRanges = new Range[fpsRangeCount];
792         int i = 0;
793         for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
794             if (size.equals(config.getSize())) {
795                 fpsRanges[i++] = config.getFpsRange();
796             }
797         }
798         return fpsRanges;
799     }
800 
801     /**
802      * Get a list of supported high speed video recording FPS ranges.
803      * <p>
804      * When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is
805      * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will
806      * list the supported high speed video FPS range configurations. Application can then use
807      * {@link #getHighSpeedVideoSizesFor} to query available sizes for one of returned FPS range.
808      * </p>
809      * <p>
     * To enable high speed video recording, the application must create a constrained high speed
811      * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit
812      * a CaptureRequest list created by
813      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
814      * to this session. The application must select the video size from this method and
815      * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
816      * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and
817      * generate the high speed request list. For example, if the application intends to do high
818      * speed recording, it can select one FPS range reported by this method, query the video sizes
819      * corresponding to this FPS range by {@link #getHighSpeedVideoSizesFor} and use one of reported
820      * sizes to create a high speed capture session. Note that for the use case of multiple output
821      * streams, application must select one unique size from this method to use (e.g., preview and
822      * recording streams must have the same size). Otherwise, the high speed session creation will
823      * fail. Once the high speed capture session is created, the application can set the FPS range
824      * in the recording request lists via
825      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
826      * </p>
827      * <p>
828      * The FPS ranges reported by this method will have below characteristics:
     * <li>The fpsMin and fpsMax will be a multiple of 30fps.</li>
830      * <li>The fpsMin will be no less than 30fps, the fpsMax will be no less than 120fps.</li>
831      * <li>At least one range will be a fixed FPS range where fpsMin == fpsMax.</li>
832      * <li>For each fixed FPS range, there will be one corresponding variable FPS range
833      * [30, fps_max] or [60, fps_max]. These kinds of FPS ranges are suitable for preview-only
834      * use cases where the application doesn't want the camera device always produce higher frame
835      * rate than the display refresh rate. Both 30fps and 60fps preview rate will not be
836      * supported for the same recording rate.</li>
837      * </p>
838      *
839      * @return an array of supported high speed video recording FPS ranges The upper bound of
840      *         returned ranges is guaranteed to be larger or equal to 120.
841      * @see #getHighSpeedVideoSizesFor
842      * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
843      * @see CameraDevice#createConstrainedHighSpeedCaptureSession
844      * @see android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList
845      */
846     @SuppressWarnings("unchecked")
getHighSpeedVideoFpsRanges()847     public Range<Integer>[] getHighSpeedVideoFpsRanges() {
848         Set<Range<Integer>> keySet = mHighSpeedVideoFpsRangeMap.keySet();
849         return keySet.toArray(new Range[keySet.size()]);
850     }
851 
852     /**
853      * Get the supported video sizes for an input high speed FPS range.
854      *
855      * <p> See {@link #getHighSpeedVideoSizes} for how to enable high speed recording.</p>
856      *
857      * @param fpsRange one of the FPS range returned by {@link #getHighSpeedVideoFpsRanges()}
858      * @return An array of video sizes to create high speed capture sessions for high speed streaming
859      *         use cases.
860      *
861      * @throws IllegalArgumentException if input FPS range does not exist in the return value of
862      *         getHighSpeedVideoFpsRanges
863      * @see #getHighSpeedVideoFpsRanges()
864      */
getHighSpeedVideoSizesFor(Range<Integer> fpsRange)865     public Size[] getHighSpeedVideoSizesFor(Range<Integer> fpsRange) {
866         Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
867         if (sizeCount == null || sizeCount == 0) {
868             throw new IllegalArgumentException(String.format(
869                     "FpsRange %s does not support high speed video recording", fpsRange));
870         }
871 
872         Size[] sizes = new Size[sizeCount];
873         int i = 0;
874         for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
875             if (fpsRange.equals(config.getFpsRange())) {
876                 sizes[i++] = config.getSize();
877             }
878         }
879         return sizes;
880     }
881 
882     /**
883      * Get a list of supported high resolution sizes, which cannot operate at full BURST_CAPTURE
884      * rate.
885      *
886      * <p>This includes all output sizes that cannot meet the 20 fps frame rate requirements for the
887      * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}
888      * capability.  This does not include the stall duration, so for example, a JPEG or RAW16 output
889      * resolution with a large stall duration but a minimum frame duration that's above 20 fps will
890      * still be listed in the regular {@link #getOutputSizes} list. All the sizes on this list that
891      * are less than 24 megapixels are still guaranteed to operate at a rate of at least 10 fps,
892      * not including stall duration. Sizes on this list that are at least 24 megapixels are allowed
893      * to operate at less than 10 fps.</p>
894      *
895      * <p>For a device that does not support the BURST_CAPTURE capability, this list will be
896      * {@code null}, since resolutions in the {@link #getOutputSizes} list are already not
897      * guaranteed to meet &gt;= 20 fps rate requirements. For a device that does support the
898      * BURST_CAPTURE capability, this list may be empty, if all supported resolutions meet the 20
899      * fps requirement.</p>
900      *
901      * @return an array of supported slower high-resolution sizes, or {@code null} if the
902      *         BURST_CAPTURE capability is not supported
903      */
getHighResolutionOutputSizes(int format)904     public Size[] getHighResolutionOutputSizes(int format) {
905         if (!mListHighResolution) return null;
906 
907         return getPublicFormatSizes(format, /*output*/true, /*highRes*/ true);
908     }
909 
910     /**
911      * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
912      * for the format/size combination (in nanoseconds).
913      *
914      * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
915      * <p>{@code size} should be one of the ones returned by
916      * {@link #getOutputSizes(int)}.</p>
917      *
918      * <p>This corresponds to the minimum frame duration (maximum frame rate) possible when only
919      * that stream is configured in a session, with all processing (typically in
920      * {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.  </p>
921      *
922      * <p>When multiple streams are used in a session, the minimum frame duration will be
923      * {@code max(individual stream min durations)}.  See {@link #getOutputStallDuration} for
924      * details of timing for formats that may cause frame rate slowdown when they are targeted by a
925      * capture request.</p>
926      *
927      * <p>For devices that do not support manual sensor control
928      * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
929      * this function may return 0.</p>
930      *
931      * <p>The minimum frame duration of a stream (of a particular format, size) is the same
932      * regardless of whether the stream is input or output.</p>
933      *
934      * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
935      * @param size an output-compatible size
936      * @return a minimum frame duration {@code >} 0 in nanoseconds, or
937      *          0 if the minimum frame duration is not available.
938      *
939      * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
940      * @throws NullPointerException if {@code size} was {@code null}
941      *
942      * @see CaptureRequest#SENSOR_FRAME_DURATION
943      * @see #getOutputStallDuration(int, Size)
944      * @see ImageFormat
945      * @see PixelFormat
946      */
getOutputMinFrameDuration(int format, Size size)947     public long getOutputMinFrameDuration(int format, Size size) {
948         Objects.requireNonNull(size, "size must not be null");
949         checkArgumentFormatSupported(format, /*output*/true);
950 
951         return getInternalFormatDuration(imageFormatToInternal(format),
952                 imageFormatToDataspace(format),
953                 size,
954                 DURATION_MIN_FRAME);
955     }
956 
957     /**
958      * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
959      * for the class/size combination (in nanoseconds).
960      *
961      * <p>This assumes that the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
962      * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.</p>
963      *
964      * <p>{@code klass} should be one of the ones which is supported by
965      * {@link #isOutputSupportedFor(Class)}.</p>
966      *
967      * <p>{@code size} should be one of the ones returned by
968      * {@link #getOutputSizes(int)}.</p>
969      *
970      * <p>This corresponds to the minimum frame duration (maximum frame rate) possible when only
971      * that stream is configured in a session, with all processing (typically in
972      * {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.  </p>
973      *
974      * <p>When multiple streams are used in a session, the minimum frame duration will be
975      * {@code max(individual stream min durations)}.  See {@link #getOutputStallDuration} for
976      * details of timing for formats that may cause frame rate slowdown when they are targeted by a
977      * capture request.</p>
978      *
979      * <p>For devices that do not support manual sensor control
980      * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
981      * this function may return 0.</p>
982      *
983      * <p>The minimum frame duration of a stream (of a particular format, size) is the same
984      * regardless of whether the stream is input or output.</p>
985      *
986      * @param klass
987      *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
988      *          non-empty array returned by {@link #getOutputSizes(Class)}
989      * @param size an output-compatible size
990      * @return a minimum frame duration {@code >} 0 in nanoseconds, or
991      *          0 if the minimum frame duration is not available.
992      *
993      * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
994      * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
995      *
996      * @see CaptureRequest#SENSOR_FRAME_DURATION
997      * @see ImageFormat
998      * @see PixelFormat
999      */
getOutputMinFrameDuration(final Class<T> klass, final Size size)1000     public <T> long getOutputMinFrameDuration(final Class<T> klass, final Size size) {
1001         if (!isOutputSupportedFor(klass)) {
1002             throw new IllegalArgumentException("klass was not supported");
1003         }
1004 
1005         return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1006                 HAL_DATASPACE_UNKNOWN,
1007                 size, DURATION_MIN_FRAME);
1008     }
1009 
1010     /**
1011      * Get the stall duration for the format/size combination (in nanoseconds).
1012      *
1013      * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
1014      * <p>{@code size} should be one of the ones returned by
1015      * {@link #getOutputSizes(int)}.</p>
1016      *
1017      * <p>
1018      * A stall duration is how much extra time would get added to the normal minimum frame duration
1019      * for a repeating request that has streams with non-zero stall.
1020      *
1021      * <p>For example, consider JPEG captures which have the following characteristics:
1022      *
1023      * <ul>
1024      * <li>JPEG streams act like processed YUV streams in requests for which they are not included;
1025      * in requests in which they are directly referenced, they act as JPEG streams.
1026      * This is because supporting a JPEG stream requires the underlying YUV data to always be ready
1027      * for use by a JPEG encoder, but the encoder will only be used (and impact frame duration) on
1028      * requests that actually reference a JPEG stream.
1029      * <li>The JPEG processor can run concurrently to the rest of the camera pipeline, but cannot
1030      * process more than 1 capture at a time.
1031      * </ul>
1032      *
1033      * <p>In other words, using a repeating YUV request would result in a steady frame rate
1034      * (let's say it's 30 FPS). If a single JPEG request is submitted periodically,
1035      * the frame rate will stay at 30 FPS (as long as we wait for the previous JPEG to return each
1036      * time). If we try to submit a repeating YUV + JPEG request, then the frame rate will drop from
1037      * 30 FPS.</p>
1038      *
1039      * <p>In general, submitting a new request with a non-0 stall time stream will <em>not</em> cause a
1040      * frame rate drop unless there are still outstanding buffers for that stream from previous
1041      * requests.</p>
1042      *
1043      * <p>Submitting a repeating request with streams (call this {@code S}) is the same as setting
1044      * the minimum frame duration from the normal minimum frame duration corresponding to {@code S},
1045      * added with the maximum stall duration for {@code S}.</p>
1046      *
1047      * <p>If interleaving requests with and without a stall duration, a request will stall by the
1048      * maximum of the remaining times for each can-stall stream with outstanding buffers.</p>
1049      *
1050      * <p>This means that a stalling request will not have an exposure start until the stall has
1051      * completed.</p>
1052      *
1053      * <p>This should correspond to the stall duration when only that stream is active, with all
1054      * processing (typically in {@code android.*.mode}) set to {@code FAST} or {@code OFF}.
1055      * Setting any of the processing modes to {@code HIGH_QUALITY} effectively results in an
1056      * indeterminate stall duration for all streams in a request (the regular stall calculation
1057      * rules are ignored).</p>
1058      *
1059      * <p>The following formats may always have a stall duration:
1060      * <ul>
1061      * <li>{@link ImageFormat#JPEG JPEG}
1062      * <li>{@link ImageFormat#RAW_SENSOR RAW16}
1063      * <li>{@link ImageFormat#RAW_PRIVATE RAW_PRIVATE}
1064      * </ul>
1065      * </p>
1066      *
1067      * <p>The following formats will never have a stall duration:
1068      * <ul>
1069      * <li>{@link ImageFormat#YUV_420_888 YUV_420_888}
1070      * <li>{@link ImageFormat#PRIVATE PRIVATE}
1071      * </ul></p>
1072      *
1073      * <p>
1074      * All other formats may or may not have an allowed stall duration on a per-capability basis;
1075      * refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
1076      * android.request.availableCapabilities} for more details.</p>
1077      * </p>
1078      *
1079      * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}
1080      * for more information about calculating the max frame rate (absent stalls).</p>
1081      *
1082      * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
1083      * @param size an output-compatible size
1084      * @return a stall duration {@code >=} 0 in nanoseconds
1085      *
1086      * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
1087      * @throws NullPointerException if {@code size} was {@code null}
1088      *
1089      * @see CaptureRequest#SENSOR_FRAME_DURATION
1090      * @see ImageFormat
1091      * @see PixelFormat
1092      */
getOutputStallDuration(int format, Size size)1093     public long getOutputStallDuration(int format, Size size) {
1094         checkArgumentFormatSupported(format, /*output*/true);
1095 
1096         return getInternalFormatDuration(imageFormatToInternal(format),
1097                 imageFormatToDataspace(format),
1098                 size,
1099                 DURATION_STALL);
1100     }
1101 
1102     /**
1103      * Get the stall duration for the class/size combination (in nanoseconds).
1104      *
1105      * <p>This assumes that the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
     * For user-defined formats, use {@link #getOutputStallDuration(int, Size)}.</p>
1107      *
1108      * <p>{@code klass} should be one of the ones with a non-empty array returned by
1109      * {@link #getOutputSizes(Class)}.</p>
1110      *
1111      * <p>{@code size} should be one of the ones returned by
1112      * {@link #getOutputSizes(Class)}.</p>
1113      *
1114      * <p>See {@link #getOutputStallDuration(int, Size)} for a definition of a
1115      * <em>stall duration</em>.</p>
1116      *
1117      * @param klass
1118      *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
1119      *          non-empty array returned by {@link #getOutputSizes(Class)}
1120      * @param size an output-compatible size
     * @return a stall duration {@code >=} 0 in nanoseconds
1122      *
1123      * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
1124      * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
1125      *
1126      * @see CaptureRequest#SENSOR_FRAME_DURATION
1127      * @see ImageFormat
1128      * @see PixelFormat
1129      */
getOutputStallDuration(final Class<T> klass, final Size size)1130     public <T> long getOutputStallDuration(final Class<T> klass, final Size size) {
1131         if (!isOutputSupportedFor(klass)) {
1132             throw new IllegalArgumentException("klass was not supported");
1133         }
1134 
1135         return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1136                 HAL_DATASPACE_UNKNOWN, size, DURATION_STALL);
1137     }
1138 
1139     /**
1140      * Check if this {@link StreamConfigurationMap} is equal to another
1141      * {@link StreamConfigurationMap}.
1142      *
1143      * <p>Two vectors are only equal if and only if each of the respective elements is equal.</p>
1144      *
1145      * @return {@code true} if the objects were equal, {@code false} otherwise
1146      */
1147     @Override
equals(final Object obj)1148     public boolean equals(final Object obj) {
1149         if (obj == null) {
1150             return false;
1151         }
1152         if (this == obj) {
1153             return true;
1154         }
1155         if (obj instanceof StreamConfigurationMap) {
1156             final StreamConfigurationMap other = (StreamConfigurationMap) obj;
1157             // XX: do we care about order?
1158             return Arrays.equals(mConfigurations, other.mConfigurations) &&
1159                     Arrays.equals(mMinFrameDurations, other.mMinFrameDurations) &&
1160                     Arrays.equals(mStallDurations, other.mStallDurations) &&
1161                     Arrays.equals(mDepthConfigurations, other.mDepthConfigurations) &&
1162                     Arrays.equals(mDepthMinFrameDurations, other.mDepthMinFrameDurations) &&
1163                     Arrays.equals(mDepthStallDurations, other.mDepthStallDurations) &&
1164                     Arrays.equals(mDynamicDepthConfigurations, other.mDynamicDepthConfigurations) &&
1165                     Arrays.equals(mDynamicDepthMinFrameDurations,
1166                             other.mDynamicDepthMinFrameDurations) &&
1167                     Arrays.equals(mDynamicDepthStallDurations, other.mDynamicDepthStallDurations) &&
1168                     Arrays.equals(mHeicConfigurations, other.mHeicConfigurations) &&
1169                     Arrays.equals(mHeicMinFrameDurations, other.mHeicMinFrameDurations) &&
1170                     Arrays.equals(mHeicStallDurations, other.mHeicStallDurations) &&
1171                     Arrays.equals(mJpegRConfigurations, other.mJpegRConfigurations) &&
1172                     Arrays.equals(mJpegRMinFrameDurations, other.mJpegRMinFrameDurations) &&
1173                     Arrays.equals(mJpegRStallDurations, other.mJpegRStallDurations) &&
1174                     Arrays.equals(mHighSpeedVideoConfigurations,
1175                             other.mHighSpeedVideoConfigurations);
1176         }
1177         return false;
1178     }
1179 
    /**
     * {@inheritDoc}
     */
    @Override
    public int hashCode() {
        // XX: do we care about order?
        // Must stay consistent with equals(): hash exactly the same arrays that
        // equals() compares, in the same order, so equal maps hash identically.
        return HashCodeHelpers.hashCodeGeneric(
                mConfigurations, mMinFrameDurations, mStallDurations,
                mDepthConfigurations, mDepthMinFrameDurations, mDepthStallDurations,
                mDynamicDepthConfigurations, mDynamicDepthMinFrameDurations,
                mDynamicDepthStallDurations, mHeicConfigurations,
                mHeicMinFrameDurations, mHeicStallDurations,
                mJpegRConfigurations, mJpegRMinFrameDurations, mJpegRStallDurations,
                mHighSpeedVideoConfigurations);
    }
1195 
1196     // Check that the argument is supported by #getOutputFormats or #getInputFormats
checkArgumentFormatSupported(int format, boolean output)1197     private int checkArgumentFormatSupported(int format, boolean output) {
1198         checkArgumentFormat(format);
1199 
1200         int internalFormat = imageFormatToInternal(format);
1201         int internalDataspace = imageFormatToDataspace(format);
1202 
1203         if (output) {
1204             if (internalDataspace == HAL_DATASPACE_DEPTH) {
1205                 if (mDepthOutputFormats.indexOfKey(internalFormat) >= 0) {
1206                     return format;
1207                 }
1208             } else if (internalDataspace == HAL_DATASPACE_DYNAMIC_DEPTH) {
1209                 if (mDynamicDepthOutputFormats.indexOfKey(internalFormat) >= 0) {
1210                     return format;
1211                 }
1212             } else if (internalDataspace == HAL_DATASPACE_HEIF) {
1213                 if (mHeicOutputFormats.indexOfKey(internalFormat) >= 0) {
1214                     return format;
1215                 }
1216             } else if (internalDataspace == HAL_DATASPACE_JPEG_R) {
1217                 if (mJpegROutputFormats.indexOfKey(internalFormat) >= 0) {
1218                     return format;
1219                 }
1220             } else {
1221                 if (mAllOutputFormats.indexOfKey(internalFormat) >= 0) {
1222                     return format;
1223                 }
1224             }
1225         } else {
1226             if (mInputFormats.indexOfKey(internalFormat) >= 0) {
1227                 return format;
1228             }
1229         }
1230 
1231         throw new IllegalArgumentException(String.format(
1232                 "format %x is not supported by this stream configuration map", format));
1233     }
1234 
1235     /**
1236      * Ensures that the format is either user-defined or implementation defined.
1237      *
1238      * <p>If a format has a different internal representation than the public representation,
1239      * passing in the public representation here will fail.</p>
1240      *
1241      * <p>For example if trying to use {@link ImageFormat#JPEG}:
1242      * it has a different public representation than the internal representation
1243      * {@code HAL_PIXEL_FORMAT_BLOB}, this check will fail.</p>
1244      *
1245      * <p>Any invalid/undefined formats will raise an exception.</p>
1246      *
1247      * @param format image format
1248      * @return the format
1249      *
1250      * @throws IllegalArgumentException if the format was invalid
1251      */
checkArgumentFormatInternal(int format)1252     static int checkArgumentFormatInternal(int format) {
1253         switch (format) {
1254             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1255             case HAL_PIXEL_FORMAT_BLOB:
1256             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1257             case HAL_PIXEL_FORMAT_Y16:
1258                 return format;
1259             case ImageFormat.JPEG:
1260             case ImageFormat.HEIC:
1261                 throw new IllegalArgumentException(
1262                         "An unknown internal format: " + format);
1263             default:
1264                 return checkArgumentFormat(format);
1265         }
1266     }
1267 
1268     /**
1269      * Ensures that the format is publicly user-defined in either ImageFormat or PixelFormat.
1270      *
1271      * <p>If a format has a different public representation than the internal representation,
1272      * passing in the internal representation here will fail.</p>
1273      *
1274      * <p>For example if trying to use {@code HAL_PIXEL_FORMAT_BLOB}:
1275      * it has a different internal representation than the public representation
1276      * {@link ImageFormat#JPEG}, this check will fail.</p>
1277      *
1278      * <p>Any invalid/undefined formats will raise an exception, including implementation-defined.
1279      * </p>
1280      *
1281      * <p>Note that {@code @hide} and deprecated formats will not pass this check.</p>
1282      *
1283      * @param format image format
1284      * @return the format
1285      *
1286      * @throws IllegalArgumentException if the format was not user-defined
1287      */
checkArgumentFormat(int format)1288     static int checkArgumentFormat(int format) {
1289         if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) {
1290             throw new IllegalArgumentException(String.format(
1291                     "format 0x%x was not defined in either ImageFormat or PixelFormat", format));
1292         }
1293 
1294         return format;
1295     }
1296 
1297     /**
1298      * Convert an internal format compatible with {@code graphics.h} into public-visible
1299      * {@code ImageFormat}. This assumes the dataspace of the format is not HAL_DATASPACE_DEPTH.
1300      *
1301      * <p>In particular these formats are converted:
1302      * <ul>
1303      * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.JPEG</li>
1304      * </ul>
1305      * </p>
1306      *
1307      * <p>Passing in a format which has no public equivalent will fail;
1308      * as will passing in a public format which has a different internal format equivalent.
1309      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1310      *
1311      * <p>All other formats are returned as-is, no further invalid check is performed.</p>
1312      *
1313      * <p>This function is the dual of {@link #imageFormatToInternal} for dataspaces other than
1314      * HAL_DATASPACE_DEPTH.</p>
1315      *
1316      * @param format image format from {@link ImageFormat} or {@link PixelFormat}
1317      * @return the converted image formats
1318      *
1319      * @throws IllegalArgumentException
1320      *          if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
1321      *          {@link ImageFormat#JPEG}
1322      *
1323      * @see ImageFormat
1324      * @see PixelFormat
1325      * @see #checkArgumentFormat
1326      * @hide
1327      */
imageFormatToPublic(int format)1328     public static int imageFormatToPublic(int format) {
1329         switch (format) {
1330             case HAL_PIXEL_FORMAT_BLOB:
1331                 return ImageFormat.JPEG;
1332             case ImageFormat.JPEG:
1333                 throw new IllegalArgumentException(
1334                         "ImageFormat.JPEG is an unknown internal format");
1335             default:
1336                 return format;
1337         }
1338     }
1339 
1340     /**
1341      * Convert an internal format compatible with {@code graphics.h} into public-visible
1342      * {@code ImageFormat}. This assumes the dataspace of the format is HAL_DATASPACE_DEPTH.
1343      *
1344      * <p>In particular these formats are converted:
1345      * <ul>
1346      * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.DEPTH_POINT_CLOUD
1347      * <li>HAL_PIXEL_FORMAT_Y16 => ImageFormat.DEPTH16
1348      * </ul>
1349      * </p>
1350      *
1351      * <p>Passing in an implementation-defined format which has no public equivalent will fail;
1352      * as will passing in a public format which has a different internal format equivalent.
1353      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1354      *
1355      * <p>All other formats are returned as-is, no further invalid check is performed.</p>
1356      *
1357      * <p>This function is the dual of {@link #imageFormatToInternal} for formats associated with
1358      * HAL_DATASPACE_DEPTH.</p>
1359      *
1360      * @param format image format from {@link ImageFormat} or {@link PixelFormat}
1361      * @return the converted image formats
1362      *
1363      * @throws IllegalArgumentException
1364      *          if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
1365      *          {@link ImageFormat#JPEG}
1366      *
1367      * @see ImageFormat
1368      * @see PixelFormat
1369      * @see #checkArgumentFormat
1370      * @hide
1371      */
depthFormatToPublic(int format)1372     public static int depthFormatToPublic(int format) {
1373         switch (format) {
1374             case HAL_PIXEL_FORMAT_BLOB:
1375                 return ImageFormat.DEPTH_POINT_CLOUD;
1376             case HAL_PIXEL_FORMAT_Y16:
1377                 return ImageFormat.DEPTH16;
1378             case HAL_PIXEL_FORMAT_RAW16:
1379                 return ImageFormat.RAW_DEPTH;
1380             case HAL_PIXEL_FORMAT_RAW10:
1381                 return ImageFormat.RAW_DEPTH10;
1382             case ImageFormat.JPEG:
1383                 throw new IllegalArgumentException(
1384                         "ImageFormat.JPEG is an unknown internal format");
1385             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1386                 throw new IllegalArgumentException(
1387                         "IMPLEMENTATION_DEFINED must not leak to public API");
1388             default:
1389                 throw new IllegalArgumentException(
1390                         "Unknown DATASPACE_DEPTH format " + format);
1391         }
1392     }
1393 
1394     /**
1395      * Convert image formats from internal to public formats (in-place).
1396      *
1397      * @param formats an array of image formats
1398      * @return {@code formats}
1399      *
1400      * @see #imageFormatToPublic
1401      */
imageFormatToPublic(int[] formats)1402     static int[] imageFormatToPublic(int[] formats) {
1403         if (formats == null) {
1404             return null;
1405         }
1406 
1407         for (int i = 0; i < formats.length; ++i) {
1408             formats[i] = imageFormatToPublic(formats[i]);
1409         }
1410 
1411         return formats;
1412     }
1413 
1414     /**
1415      * Convert a public format compatible with {@code ImageFormat} to an internal format
1416      * from {@code graphics.h}.
1417      *
1418      * <p>In particular these formats are converted:
1419      * <ul>
1420      * <li>ImageFormat.JPEG => HAL_PIXEL_FORMAT_BLOB
1421      * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_PIXEL_FORMAT_BLOB
1422      * <li>ImageFormat.DEPTH_JPEG => HAL_PIXEL_FORMAT_BLOB
1423      * <li>ImageFormat.HEIC => HAL_PIXEL_FORMAT_BLOB
1424      * <li>ImageFormat.JPEG_R => HAL_PIXEL_FORMAT_BLOB
1425      * <li>ImageFormat.DEPTH16 => HAL_PIXEL_FORMAT_Y16
1426      * </ul>
1427      * </p>
1428      *
1429      * <p>Passing in an internal format which has a different public format equivalent will fail.
1430      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1431      *
1432      * <p>All other formats are returned as-is, no invalid check is performed.</p>
1433      *
1434      * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
1435      *
1436      * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
1437      * @return the converted image formats
1438      *
1439      * @see ImageFormat
1440      * @see PixelFormat
1441      *
1442      * @throws IllegalArgumentException
1443      *              if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
1444      */
imageFormatToInternal(int format)1445     static int imageFormatToInternal(int format) {
1446         switch (format) {
1447             case ImageFormat.JPEG:
1448             case ImageFormat.DEPTH_POINT_CLOUD:
1449             case ImageFormat.DEPTH_JPEG:
1450             case ImageFormat.HEIC:
1451             case ImageFormat.JPEG_R:
1452                 return HAL_PIXEL_FORMAT_BLOB;
1453             case ImageFormat.DEPTH16:
1454                 return HAL_PIXEL_FORMAT_Y16;
1455             case ImageFormat.RAW_DEPTH:
1456                 return HAL_PIXEL_FORMAT_RAW16;
1457             case ImageFormat.RAW_DEPTH10:
1458                 return HAL_PIXEL_FORMAT_RAW10;
1459             default:
1460                 return format;
1461         }
1462     }
1463 
1464     /**
1465      * Convert a public format compatible with {@code ImageFormat} to an internal dataspace
1466      * from {@code graphics.h}.
1467      *
1468      * <p>In particular these formats are converted:
1469      * <ul>
1470      * <li>ImageFormat.JPEG => HAL_DATASPACE_V0_JFIF
1471      * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_DATASPACE_DEPTH
1472      * <li>ImageFormat.DEPTH16 => HAL_DATASPACE_DEPTH
1473      * <li>ImageFormat.DEPTH_JPEG => HAL_DATASPACE_DYNAMIC_DEPTH
1474      * <li>ImageFormat.HEIC => HAL_DATASPACE_HEIF
1475      * <li>ImageFormat.JPEG_R => HAL_DATASPACE_JPEG_R
1476      * <li>ImageFormat.YUV_420_888 => HAL_DATASPACE_JFIF
1477      * <li>ImageFormat.RAW_SENSOR => HAL_DATASPACE_ARBITRARY
1478      * <li>ImageFormat.RAW_OPAQUE => HAL_DATASPACE_ARBITRARY
1479      * <li>ImageFormat.RAW10 => HAL_DATASPACE_ARBITRARY
1480      * <li>ImageFormat.RAW12 => HAL_DATASPACE_ARBITRARY
1481      * <li>others => HAL_DATASPACE_UNKNOWN
1482      * </ul>
1483      * </p>
1484      *
1485      * <p>Passing in an implementation-defined format here will fail (it's not a public format);
1486      * as will passing in an internal format which has a different public format equivalent.
1487      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1488      *
1489      * <p>All other formats are returned as-is, no invalid check is performed.</p>
1490      *
1491      * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
1492      *
1493      * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
1494      * @return the converted image formats
1495      *
1496      * @see ImageFormat
1497      * @see PixelFormat
1498      *
1499      * @throws IllegalArgumentException
1500      *              if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
1501      */
imageFormatToDataspace(int format)1502     static int imageFormatToDataspace(int format) {
1503         switch (format) {
1504             case ImageFormat.JPEG:
1505                 return HAL_DATASPACE_V0_JFIF;
1506             case ImageFormat.DEPTH_POINT_CLOUD:
1507             case ImageFormat.DEPTH16:
1508             case ImageFormat.RAW_DEPTH:
1509             case ImageFormat.RAW_DEPTH10:
1510                 return HAL_DATASPACE_DEPTH;
1511             case ImageFormat.DEPTH_JPEG:
1512                 return HAL_DATASPACE_DYNAMIC_DEPTH;
1513             case ImageFormat.HEIC:
1514                 return HAL_DATASPACE_HEIF;
1515             case ImageFormat.JPEG_R:
1516                 return HAL_DATASPACE_JPEG_R;
1517             case ImageFormat.YUV_420_888:
1518                 return HAL_DATASPACE_JFIF;
1519             case ImageFormat.RAW_SENSOR:
1520             case ImageFormat.RAW_PRIVATE:
1521             case ImageFormat.RAW10:
1522             case ImageFormat.RAW12:
1523                 return HAL_DATASPACE_ARBITRARY;
1524             default:
1525                 return HAL_DATASPACE_UNKNOWN;
1526         }
1527     }
1528 
1529     /**
1530      * Convert image formats from public to internal formats (in-place).
1531      *
1532      * @param formats an array of image formats
1533      * @return {@code formats}
1534      *
1535      * @see #imageFormatToInternal
1536      *
1537      * @hide
1538      */
imageFormatToInternal(int[] formats)1539     public static int[] imageFormatToInternal(int[] formats) {
1540         if (formats == null) {
1541             return null;
1542         }
1543 
1544         for (int i = 0; i < formats.length; ++i) {
1545             formats[i] = imageFormatToInternal(formats[i]);
1546         }
1547 
1548         return formats;
1549     }
1550 
getPublicFormatSizes(int format, boolean output, boolean highRes)1551     private Size[] getPublicFormatSizes(int format, boolean output, boolean highRes) {
1552         try {
1553             checkArgumentFormatSupported(format, output);
1554         } catch (IllegalArgumentException e) {
1555             return null;
1556         }
1557 
1558         int internalFormat = imageFormatToInternal(format);
1559         int dataspace = imageFormatToDataspace(format);
1560 
1561         return getInternalFormatSizes(internalFormat, dataspace, output, highRes);
1562     }
1563 
    /**
     * List all sizes configured for an internal format/dataspace pair in the
     * given direction, filtered by the high-resolution flag.
     *
     * <p>Returns an empty array for depth + highRes (depth streams are never
     * high-res here), {@code null} when the format is not present in the
     * relevant format table, and otherwise the matching sizes.</p>
     */
    private Size[] getInternalFormatSizes(int format, int dataspace,
            boolean output, boolean highRes) {
        // All depth formats are non-high-res.
        if (dataspace == HAL_DATASPACE_DEPTH && highRes) {
            return new Size[0];
        }

        // Choose the size-count table matching direction + dataspace; plain
        // outputs additionally split into high-res vs. regular tables.
        SparseIntArray formatsMap =
                !output ? mInputFormats :
                dataspace == HAL_DATASPACE_DEPTH ? mDepthOutputFormats :
                dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthOutputFormats :
                dataspace == HAL_DATASPACE_HEIF ? mHeicOutputFormats :
                dataspace == HAL_DATASPACE_JPEG_R ? mJpegROutputFormats :
                highRes ? mHighResOutputFormats :
                mOutputFormats;

        int sizesCount = formatsMap.get(format);
        // Unsupported format: inputs and the special dataspaces require a
        // non-zero count in their own table; plain outputs only require the
        // format to exist in the combined output table (the per-table count
        // may legitimately be zero for one of highRes/non-highRes).
        if ( ((!output || (dataspace == HAL_DATASPACE_DEPTH || dataspace == HAL_DATASPACE_JPEG_R ||
                            dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ||
                            dataspace == HAL_DATASPACE_HEIF)) && sizesCount == 0) ||
                (output && (dataspace != HAL_DATASPACE_DEPTH && dataspace != HAL_DATASPACE_JPEG_R &&
                            dataspace != HAL_DATASPACE_DYNAMIC_DEPTH &&
                            dataspace != HAL_DATASPACE_HEIF) &&
                 mAllOutputFormats.get(format) == 0)) {
            return null;
        }

        Size[] sizes = new Size[sizesCount];
        int sizeIndex = 0;

        // Configurations and min-frame-durations for the selected dataspace.
        StreamConfiguration[] configurations =
                (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations :
                (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthConfigurations :
                (dataspace == HAL_DATASPACE_HEIF) ? mHeicConfigurations :
                (dataspace == HAL_DATASPACE_JPEG_R) ? mJpegRConfigurations :
                mConfigurations;
        StreamConfigurationDuration[] minFrameDurations =
                (dataspace == HAL_DATASPACE_DEPTH) ? mDepthMinFrameDurations :
                (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthMinFrameDurations :
                (dataspace == HAL_DATASPACE_HEIF) ? mHeicMinFrameDurations :
                (dataspace == HAL_DATASPACE_JPEG_R) ? mJpegRMinFrameDurations :
                mMinFrameDurations;

        for (StreamConfiguration config : configurations) {
            int fmt = config.getFormat();
            if (fmt == format && config.isOutput() == output) {
                if (output && mListHighResolution) {
                    // Filter slow high-res output formats; include for
                    // highRes, remove for !highRes
                    long duration = 0;
                    for (int i = 0; i < minFrameDurations.length; i++) {
                        StreamConfigurationDuration d = minFrameDurations[i];
                        if (d.getFormat() == fmt &&
                                d.getWidth() == config.getSize().getWidth() &&
                                d.getHeight() == config.getSize().getHeight()) {
                            duration = d.getDuration();
                            break;
                        }
                    }
                    // "Slow" means a min frame duration above the 20fps cutoff;
                    // depth configurations are exempt from the split.
                    if (dataspace != HAL_DATASPACE_DEPTH &&
                            highRes != (duration > DURATION_20FPS_NS)) {
                        continue;
                    }
                }
                sizes[sizeIndex++] = config.getSize();
            }
        }

        // Dynamic depth streams can have both fast and also high res modes.
        // For those dataspaces (and JPEG_R) a partially-filled array is legal
        // and gets trimmed; for all others a mismatch is an internal error.
        if ((sizeIndex != sizesCount) && (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ||
                dataspace == HAL_DATASPACE_HEIF) || (dataspace == HAL_DATASPACE_JPEG_R)) {

            if (sizeIndex > sizesCount) {
                throw new AssertionError(
                        "Too many dynamic depth sizes (expected " + sizesCount + ", actual " +
                        sizeIndex + ")");
            }

            if (sizeIndex <= 0) {
                sizes = new Size[0];
            } else {
                sizes = Arrays.copyOf(sizes, sizeIndex);
            }
        } else if (sizeIndex != sizesCount) {
            throw new AssertionError(
                    "Too few sizes (expected " + sizesCount + ", actual " + sizeIndex + ")");
        }

        return sizes;
    }
1654 
1655     /** Get the list of publicly visible output formats */
getPublicFormats(boolean output)1656     private int[] getPublicFormats(boolean output) {
1657         int[] formats = new int[getPublicFormatCount(output)];
1658 
1659         int i = 0;
1660 
1661         SparseIntArray map = getFormatsMap(output);
1662         for (int j = 0; j < map.size(); j++) {
1663             int format = map.keyAt(j);
1664             formats[i++] = imageFormatToPublic(format);
1665         }
1666         if (output) {
1667             for (int j = 0; j < mDepthOutputFormats.size(); j++) {
1668                 formats[i++] = depthFormatToPublic(mDepthOutputFormats.keyAt(j));
1669             }
1670             if (mDynamicDepthOutputFormats.size() > 0) {
1671                 // Only one publicly dynamic depth format is available.
1672                 formats[i++] = ImageFormat.DEPTH_JPEG;
1673             }
1674             if (mHeicOutputFormats.size() > 0) {
1675                 formats[i++] = ImageFormat.HEIC;
1676             }
1677             if (mJpegROutputFormats.size() > 0) {
1678                 formats[i++] = ImageFormat.JPEG_R;
1679             }
1680         }
1681         if (formats.length != i) {
1682             throw new AssertionError("Too few formats " + i + ", expected " + formats.length);
1683         }
1684 
1685         return formats;
1686     }
1687 
1688     /** Get the format -> size count map for either output or input formats */
getFormatsMap(boolean output)1689     private SparseIntArray getFormatsMap(boolean output) {
1690         return output ? mAllOutputFormats : mInputFormats;
1691     }
1692 
getInternalFormatDuration(int format, int dataspace, Size size, int duration)1693     private long getInternalFormatDuration(int format, int dataspace, Size size, int duration) {
1694         // assume format is already checked, since its internal
1695 
1696         if (!isSupportedInternalConfiguration(format, dataspace, size)) {
1697             throw new IllegalArgumentException("size was not supported");
1698         }
1699 
1700         StreamConfigurationDuration[] durations = getDurations(duration, dataspace);
1701 
1702         for (StreamConfigurationDuration configurationDuration : durations) {
1703             if (configurationDuration.getFormat() == format &&
1704                     configurationDuration.getWidth() == size.getWidth() &&
1705                     configurationDuration.getHeight() == size.getHeight()) {
1706                 return configurationDuration.getDuration();
1707             }
1708         }
1709         // Default duration is '0' (unsupported/no extra stall)
1710         return 0;
1711     }
1712 
1713     /**
1714      * Get the durations array for the kind of duration
1715      *
1716      * @see #DURATION_MIN_FRAME
1717      * @see #DURATION_STALL
1718      * */
getDurations(int duration, int dataspace)1719     private StreamConfigurationDuration[] getDurations(int duration, int dataspace) {
1720         switch (duration) {
1721             case DURATION_MIN_FRAME:
1722                 return (dataspace == HAL_DATASPACE_DEPTH) ? mDepthMinFrameDurations :
1723                         (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ?
1724                         mDynamicDepthMinFrameDurations :
1725                         (dataspace == HAL_DATASPACE_HEIF) ? mHeicMinFrameDurations :
1726                         (dataspace == HAL_DATASPACE_JPEG_R) ? mJpegRMinFrameDurations :
1727                         mMinFrameDurations;
1728 
1729             case DURATION_STALL:
1730                 return (dataspace == HAL_DATASPACE_DEPTH) ? mDepthStallDurations :
1731                         (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthStallDurations :
1732                         (dataspace == HAL_DATASPACE_HEIF) ? mHeicStallDurations :
1733                         (dataspace == HAL_DATASPACE_JPEG_R) ? mJpegRStallDurations :
1734                         mStallDurations;
1735             default:
1736                 throw new IllegalArgumentException("duration was invalid");
1737         }
1738     }
1739 
1740     /** Count the number of publicly-visible output formats */
getPublicFormatCount(boolean output)1741     private int getPublicFormatCount(boolean output) {
1742         SparseIntArray formatsMap = getFormatsMap(output);
1743         int size = formatsMap.size();
1744         if (output) {
1745             size += mDepthOutputFormats.size();
1746             size += mDynamicDepthOutputFormats.size();
1747             size += mHeicOutputFormats.size();
1748             size += mJpegROutputFormats.size();
1749         }
1750 
1751         return size;
1752     }
1753 
arrayContains(T[] array, T element)1754     private static <T> boolean arrayContains(T[] array, T element) {
1755         if (array == null) {
1756             return false;
1757         }
1758 
1759         for (T el : array) {
1760             if (Objects.equals(el, element)) {
1761                 return true;
1762             }
1763         }
1764 
1765         return false;
1766     }
1767 
isSupportedInternalConfiguration(int format, int dataspace, Size size)1768     private boolean isSupportedInternalConfiguration(int format, int dataspace, Size size) {
1769         StreamConfiguration[] configurations =
1770                 (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations :
1771                 (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthConfigurations :
1772                 (dataspace == HAL_DATASPACE_HEIF) ? mHeicConfigurations :
1773                 (dataspace == HAL_DATASPACE_JPEG_R) ? mJpegRConfigurations :
1774                 mConfigurations;
1775 
1776         for (int i = 0; i < configurations.length; i++) {
1777             if (configurations[i].getFormat() == format &&
1778                     configurations[i].getSize().equals(size)) {
1779                 return true;
1780             }
1781         }
1782 
1783         return false;
1784     }
1785 
    /**
     * Return this {@link StreamConfigurationMap} as a string representation.
     *
     * <p>{@code "StreamConfigurationMap(Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d,
     * stall:%d], ... [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]), Inputs([w:%d, h:%d,
     * format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)]), ValidOutputFormatsForInput(
     * [in:%d, out:%d, ... %d], ... [in:%d, out:%d, ... %d]), HighSpeedVideoConfigurations(
     * [w:%d, h:%d, min_fps:%d, max_fps:%d], ... [w:%d, h:%d, min_fps:%d, max_fps:%d]))"}.</p>
     *
     * <p>{@code Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d], ...
     * [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d])}, where
     * {@code [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]} represents an output
     * configuration's width, height, format, minimal frame duration in nanoseconds, and stall
     * duration in nanoseconds.</p>
     *
     * <p>{@code Inputs([w:%d, h:%d, format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)])}, where
     * {@code [w:%d, h:%d, format:%s(%d)]} represents an input configuration's width, height, and
     * format.</p>
     *
     * <p>{@code ValidOutputFormatsForInput([in:%s(%d), out:%s(%d), ... %s(%d)],
     * ... [in:%s(%d), out:%s(%d), ... %s(%d)])}, where {@code [in:%s(%d), out:%s(%d), ... %s(%d)]}
     * represents an input format and its valid output formats.</p>
     *
     * <p>{@code HighSpeedVideoConfigurations([w:%d, h:%d, min_fps:%d, max_fps:%d],
     * ... [w:%d, h:%d, min_fps:%d, max_fps:%d])}, where
     * {@code [w:%d, h:%d, min_fps:%d, max_fps:%d]} represents a high speed video output
     * configuration's width, height, minimal frame rate, and maximal frame rate.</p>
     *
     * @return string representation of {@link StreamConfigurationMap}
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("StreamConfiguration(");
        // The section order below defines the documented output format above.
        appendOutputsString(sb);
        sb.append(", ");
        appendHighResOutputsString(sb);
        sb.append(", ");
        appendInputsString(sb);
        sb.append(", ");
        appendValidOutputFormatsForInputString(sb);
        sb.append(", ");
        appendHighSpeedVideoConfigurationsString(sb);
        sb.append(")");

        return sb.toString();
    }
1832 
1833     /**
1834      * Size comparison method used by size comparators.
1835      *
1836      * @hide
1837      */
compareSizes(int widthA, int heightA, int widthB, int heightB)1838     public static int compareSizes(int widthA, int heightA, int widthB, int heightB) {
1839         long left = widthA * (long) heightA;
1840         long right = widthB * (long) heightB;
1841         if (left == right) {
1842             left = widthA;
1843             right = widthB;
1844         }
1845         return (left < right) ? -1 : (left > right ? 1 : 0);
1846     }
1847 
appendOutputsString(StringBuilder sb)1848     private void appendOutputsString(StringBuilder sb) {
1849         sb.append("Outputs(");
1850         int[] formats = getOutputFormats();
1851         for (int format : formats) {
1852             Size[] sizes = getOutputSizes(format);
1853             for (Size size : sizes) {
1854                 long minFrameDuration = getOutputMinFrameDuration(format, size);
1855                 long stallDuration = getOutputStallDuration(format, size);
1856                 sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
1857                         "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
1858                         format, minFrameDuration, stallDuration));
1859             }
1860         }
1861         // Remove the pending ", "
1862         if (sb.charAt(sb.length() - 1) == ' ') {
1863             sb.delete(sb.length() - 2, sb.length());
1864         }
1865         sb.append(")");
1866     }
1867 
appendHighResOutputsString(StringBuilder sb)1868     private void appendHighResOutputsString(StringBuilder sb) {
1869         sb.append("HighResolutionOutputs(");
1870         int[] formats = getOutputFormats();
1871         for (int format : formats) {
1872             Size[] sizes = getHighResolutionOutputSizes(format);
1873             if (sizes == null) continue;
1874             for (Size size : sizes) {
1875                 long minFrameDuration = getOutputMinFrameDuration(format, size);
1876                 long stallDuration = getOutputStallDuration(format, size);
1877                 sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
1878                         "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
1879                         format, minFrameDuration, stallDuration));
1880             }
1881         }
1882         // Remove the pending ", "
1883         if (sb.charAt(sb.length() - 1) == ' ') {
1884             sb.delete(sb.length() - 2, sb.length());
1885         }
1886         sb.append(")");
1887     }
1888 
appendInputsString(StringBuilder sb)1889     private void appendInputsString(StringBuilder sb) {
1890         sb.append("Inputs(");
1891         int[] formats = getInputFormats();
1892         for (int format : formats) {
1893             Size[] sizes = getInputSizes(format);
1894             for (Size size : sizes) {
1895                 sb.append(String.format("[w:%d, h:%d, format:%s(%d)], ", size.getWidth(),
1896                         size.getHeight(), formatToString(format), format));
1897             }
1898         }
1899         // Remove the pending ", "
1900         if (sb.charAt(sb.length() - 1) == ' ') {
1901             sb.delete(sb.length() - 2, sb.length());
1902         }
1903         sb.append(")");
1904     }
1905 
appendValidOutputFormatsForInputString(StringBuilder sb)1906     private void appendValidOutputFormatsForInputString(StringBuilder sb) {
1907         sb.append("ValidOutputFormatsForInput(");
1908         int[] inputFormats = getInputFormats();
1909         for (int inputFormat : inputFormats) {
1910             sb.append(String.format("[in:%s(%d), out:", formatToString(inputFormat), inputFormat));
1911             int[] outputFormats = getValidOutputFormatsForInput(inputFormat);
1912             for (int i = 0; i < outputFormats.length; i++) {
1913                 sb.append(String.format("%s(%d)", formatToString(outputFormats[i]),
1914                         outputFormats[i]));
1915                 if (i < outputFormats.length - 1) {
1916                     sb.append(", ");
1917                 }
1918             }
1919             sb.append("], ");
1920         }
1921         // Remove the pending ", "
1922         if (sb.charAt(sb.length() - 1) == ' ') {
1923             sb.delete(sb.length() - 2, sb.length());
1924         }
1925         sb.append(")");
1926     }
1927 
appendHighSpeedVideoConfigurationsString(StringBuilder sb)1928     private void appendHighSpeedVideoConfigurationsString(StringBuilder sb) {
1929         sb.append("HighSpeedVideoConfigurations(");
1930         Size[] sizes = getHighSpeedVideoSizes();
1931         for (Size size : sizes) {
1932             Range<Integer>[] ranges = getHighSpeedVideoFpsRangesFor(size);
1933             for (Range<Integer> range : ranges) {
1934                 sb.append(String.format("[w:%d, h:%d, min_fps:%d, max_fps:%d], ", size.getWidth(),
1935                         size.getHeight(), range.getLower(), range.getUpper()));
1936             }
1937         }
1938         // Remove the pending ", "
1939         if (sb.charAt(sb.length() - 1) == ' ') {
1940             sb.delete(sb.length() - 2, sb.length());
1941         }
1942         sb.append(")");
1943     }
1944 
1945     /**
1946      * @hide
1947      */
formatToString(int format)1948     public static String formatToString(int format) {
1949         switch (format) {
1950             case ImageFormat.YV12:
1951                 return "YV12";
1952             case ImageFormat.YUV_420_888:
1953                 return "YUV_420_888";
1954             case ImageFormat.NV21:
1955                 return "NV21";
1956             case ImageFormat.NV16:
1957                 return "NV16";
1958             case PixelFormat.RGB_565:
1959                 return "RGB_565";
1960             case PixelFormat.RGBA_8888:
1961                 return "RGBA_8888";
1962             case PixelFormat.RGBX_8888:
1963                 return "RGBX_8888";
1964             case PixelFormat.RGB_888:
1965                 return "RGB_888";
1966             case ImageFormat.JPEG:
1967                 return "JPEG";
1968             case ImageFormat.YUY2:
1969                 return "YUY2";
1970             case ImageFormat.Y8:
1971                 return "Y8";
1972             case ImageFormat.Y16:
1973                 return "Y16";
1974             case ImageFormat.RAW_SENSOR:
1975                 return "RAW_SENSOR";
1976             case ImageFormat.RAW_PRIVATE:
1977                 return "RAW_PRIVATE";
1978             case ImageFormat.RAW10:
1979                 return "RAW10";
1980             case ImageFormat.DEPTH16:
1981                 return "DEPTH16";
1982             case ImageFormat.DEPTH_POINT_CLOUD:
1983                 return "DEPTH_POINT_CLOUD";
1984             case ImageFormat.DEPTH_JPEG:
1985                 return "DEPTH_JPEG";
1986             case ImageFormat.RAW_DEPTH:
1987                 return "RAW_DEPTH";
1988             case ImageFormat.RAW_DEPTH10:
1989                 return "RAW_DEPTH10";
1990             case ImageFormat.PRIVATE:
1991                 return "PRIVATE";
1992             case ImageFormat.HEIC:
1993                 return "HEIC";
1994             case ImageFormat.JPEG_R:
1995                 return "JPEG/R";
1996             default:
1997                 return "UNKNOWN";
1998         }
1999     }
2000 
    // Internal HAL pixel format constants, mirrored from
    // system/core/include/system/graphics.h.
    private static final int HAL_PIXEL_FORMAT_RAW16 = 0x20;
    /** Opaque byte-buffer ("blob") HAL pixel format. @hide */
    public static final int HAL_PIXEL_FORMAT_BLOB = 0x21;
    private static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;
    private static final int HAL_PIXEL_FORMAT_YCbCr_420_888 = 0x23;
    private static final int HAL_PIXEL_FORMAT_RAW_OPAQUE = 0x24;
    private static final int HAL_PIXEL_FORMAT_RAW10 = 0x25;
    private static final int HAL_PIXEL_FORMAT_RAW12 = 0x26;
    private static final int HAL_PIXEL_FORMAT_Y16 = 0x20363159;


    // Bit positions of the standard / transfer / range fields inside a packed
    // HAL dataspace value.
    private static final int HAL_DATASPACE_STANDARD_SHIFT = 16;
    private static final int HAL_DATASPACE_TRANSFER_SHIFT = 22;
    private static final int HAL_DATASPACE_RANGE_SHIFT = 27;

    private static final int HAL_DATASPACE_UNKNOWN = 0x0;

    /**
     * @hide
     */
    public static final int HAL_DATASPACE_ARBITRARY = 0x1;

    /**
     * Packed dataspace for legacy (V0) JFIF JPEG; standard/transfer/range
     * fields are combined using the shifts declared above.
     * @hide
     */
    public static final int HAL_DATASPACE_V0_JFIF =
            (2 << HAL_DATASPACE_STANDARD_SHIFT) |
            (3 << HAL_DATASPACE_TRANSFER_SHIFT) |
            (1 << HAL_DATASPACE_RANGE_SHIFT);

    /**
     * Dataspace tag for depth streams.
     * @hide
     */
    public static final int HAL_DATASPACE_DEPTH = 0x1000;
    /**
     * Dataspace tag for dynamic-depth streams.
     * @hide
     */
    public static final int HAL_DATASPACE_DYNAMIC_DEPTH = 0x1002;
    /**
     * Dataspace tag for HEIF streams.
     * @hide
     */
    public static final int HAL_DATASPACE_HEIF = 0x1004;
    /**
     * Dataspace tag for Jpeg/R streams.
     * @hide
     */
    public static final int HAL_DATASPACE_JPEG_R = 0x1005;
    /**
     * NOTE(review): numerically identical to {@code HAL_DATASPACE_V0_JFIF}
     * above (0x8C20000) — presumably the fully-packed alias; confirm against
     * the HAL dataspace definitions.
     * @hide
     */
    public static final int HAL_DATASPACE_JFIF = 0x8C20000;
    // 50 ms per frame == 20 fps.
    private static final long DURATION_20FPS_NS = 50000000L;
    /**
     * Selectors distinguishing the two duration kinds handled by the duration
     * lookup helpers.
     *
     * @see #getDurations(int, int)
     */
    private static final int DURATION_MIN_FRAME = 0;
    private static final int DURATION_STALL = 1;

    // Raw configuration and per-configuration duration arrays, grouped by
    // dataspace (default, depth, dynamic depth, HEIC, Jpeg/R).
    private final StreamConfiguration[] mConfigurations;
    private final StreamConfigurationDuration[] mMinFrameDurations;
    private final StreamConfigurationDuration[] mStallDurations;

    private final StreamConfiguration[] mDepthConfigurations;
    private final StreamConfigurationDuration[] mDepthMinFrameDurations;
    private final StreamConfigurationDuration[] mDepthStallDurations;

    private final StreamConfiguration[] mDynamicDepthConfigurations;
    private final StreamConfigurationDuration[] mDynamicDepthMinFrameDurations;
    private final StreamConfigurationDuration[] mDynamicDepthStallDurations;

    private final StreamConfiguration[] mHeicConfigurations;
    private final StreamConfigurationDuration[] mHeicMinFrameDurations;
    private final StreamConfigurationDuration[] mHeicStallDurations;

    private final StreamConfiguration[] mJpegRConfigurations;
    private final StreamConfigurationDuration[] mJpegRMinFrameDurations;
    private final StreamConfigurationDuration[] mJpegRStallDurations;

    private final HighSpeedVideoConfiguration[] mHighSpeedVideoConfigurations;
    private final ReprocessFormatsMap mInputOutputFormatsMap;

    // Whether slow high-resolution output sizes are listed separately.
    private final boolean mListHighResolution;

    // Lookup caches: counts of sizes per internal format, by dataspace.
    /** internal format -> num output sizes mapping, not including slow high-res sizes, for
     * non-depth dataspaces */
    private final SparseIntArray mOutputFormats = new SparseIntArray();
    /** internal format -> num output sizes mapping for slow high-res sizes, for non-depth
     * dataspaces */
    private final SparseIntArray mHighResOutputFormats = new SparseIntArray();
    /** internal format -> num output sizes mapping for all non-depth dataspaces */
    private final SparseIntArray mAllOutputFormats = new SparseIntArray();
    /** internal format -> num input sizes mapping, for input reprocessing formats */
    private final SparseIntArray mInputFormats = new SparseIntArray();
    /** internal format -> num depth output sizes mapping, for HAL_DATASPACE_DEPTH */
    private final SparseIntArray mDepthOutputFormats = new SparseIntArray();
    /** internal format -> num dynamic depth output sizes mapping, for HAL_DATASPACE_DYNAMIC_DEPTH */
    private final SparseIntArray mDynamicDepthOutputFormats = new SparseIntArray();
    /** internal format -> num heic output sizes mapping, for HAL_DATASPACE_HEIF */
    private final SparseIntArray mHeicOutputFormats = new SparseIntArray();
    /** internal format -> num Jpeg/R output sizes mapping, for HAL_DATASPACE_JPEG_R */
    private final SparseIntArray mJpegROutputFormats = new SparseIntArray();

    /** High speed video Size -> FPS range count mapping*/
    private final HashMap</*HighSpeedVideoSize*/Size, /*Count*/Integer> mHighSpeedVideoSizeMap =
            new HashMap<Size, Integer>();
    /** High speed video FPS range -> Size count mapping*/
    private final HashMap</*HighSpeedVideoFpsRange*/Range<Integer>, /*Count*/Integer>
            mHighSpeedVideoFpsRangeMap = new HashMap<Range<Integer>, Integer>();
2108 }
2109