• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.params;
18 
19 import static com.android.internal.util.Preconditions.checkArrayElementsNotNull;
20 
21 import android.graphics.ImageFormat;
22 import android.graphics.PixelFormat;
23 import android.hardware.camera2.CameraCharacteristics;
24 import android.hardware.camera2.CameraDevice;
25 import android.hardware.camera2.CameraMetadata;
26 import android.hardware.camera2.CaptureRequest;
27 import android.hardware.camera2.utils.HashCodeHelpers;
28 import android.hardware.camera2.utils.SurfaceUtils;
29 import android.util.Range;
30 import android.util.Size;
31 import android.util.SparseIntArray;
32 import android.view.Surface;
33 
34 import java.util.Arrays;
35 import java.util.HashMap;
36 import java.util.Objects;
37 import java.util.Set;
38 
39 /**
40  * Immutable class to store the available stream
41  * {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP configurations} to set up
42  * {@link android.view.Surface Surfaces} for creating a
43  * {@link android.hardware.camera2.CameraCaptureSession capture session} with
44  * {@link android.hardware.camera2.CameraDevice#createCaptureSession}.
45  * <!-- TODO: link to input stream configuration -->
46  *
47  * <p>This is the authoritative list for all <!-- input/ -->output formats (and sizes respectively
48  * for that format) that are supported by a camera device.</p>
49  *
50  * <p>This also contains the minimum frame durations and stall durations for each format/size
51  * combination that can be used to calculate effective frame rate when submitting multiple captures.
52  * </p>
53  *
54  * <p>An instance of this object is available from {@link CameraCharacteristics} using
55  * the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP} key and the
56  * {@link CameraCharacteristics#get} method.</p>
57  *
58  * <pre><code>{@code
59  * CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
60  * StreamConfigurationMap configs = characteristics.get(
61  *         CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
62  * }</code></pre>
63  *
64  * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
65  * @see CameraDevice#createCaptureSession
66  */
67 public final class StreamConfigurationMap {
68 
    private static final String TAG = "StreamConfigurationMap";

    // Upper bound on the width of a supported size that a flexible consumer's Surface may be
    // rounded to (1920 wide, i.e. 1080p); see isOutputSupportedFor(Surface).
    private static final int MAX_DIMEN_FOR_ROUNDING = 1920; // maximum allowed width for rounding
72 
73     /**
74      * Create a new {@link StreamConfigurationMap}.
75      *
76      * <p>The array parameters ownership is passed to this object after creation; do not
77      * write to them after this constructor is invoked.</p>
78      *
79      * @param configurations a non-{@code null} array of {@link StreamConfiguration}
80      * @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration}
81      * @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
82      * @param depthConfigurations a non-{@code null} array of depth {@link StreamConfiguration}
83      * @param depthMinFrameDurations a non-{@code null} array of depth
84      *        {@link StreamConfigurationDuration}
85      * @param depthStallDurations a non-{@code null} array of depth
86      *        {@link StreamConfigurationDuration}
87      * @param dynamicDepthConfigurations a non-{@code null} array of dynamic depth
88      *        {@link StreamConfiguration}
89      * @param dynamicDepthMinFrameDurations a non-{@code null} array of dynamic depth
90      *        {@link StreamConfigurationDuration}
91      * @param dynamicDepthStallDurations a non-{@code null} array of dynamic depth
92      *        {@link StreamConfigurationDuration}
93      * @param heicConfigurations a non-{@code null} array of heic {@link StreamConfiguration}
94      * @param heicMinFrameDurations a non-{@code null} array of heic
95      *        {@link StreamConfigurationDuration}
96      * @param heicStallDurations a non-{@code null} array of heic
97      *        {@link StreamConfigurationDuration}
98      * @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration}, null if
99      *        camera device does not support high speed video recording
100      * @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE
101      *        and thus needs a separate list of slow high-resolution output sizes
102      * @throws NullPointerException if any of the arguments except highSpeedVideoConfigurations
103      *         were {@code null} or any subelements were {@code null}
104      *
105      * @hide
106      */
StreamConfigurationMap( StreamConfiguration[] configurations, StreamConfigurationDuration[] minFrameDurations, StreamConfigurationDuration[] stallDurations, StreamConfiguration[] depthConfigurations, StreamConfigurationDuration[] depthMinFrameDurations, StreamConfigurationDuration[] depthStallDurations, StreamConfiguration[] dynamicDepthConfigurations, StreamConfigurationDuration[] dynamicDepthMinFrameDurations, StreamConfigurationDuration[] dynamicDepthStallDurations, StreamConfiguration[] heicConfigurations, StreamConfigurationDuration[] heicMinFrameDurations, StreamConfigurationDuration[] heicStallDurations, HighSpeedVideoConfiguration[] highSpeedVideoConfigurations, ReprocessFormatsMap inputOutputFormatsMap, boolean listHighResolution)107     public StreamConfigurationMap(
108             StreamConfiguration[] configurations,
109             StreamConfigurationDuration[] minFrameDurations,
110             StreamConfigurationDuration[] stallDurations,
111             StreamConfiguration[] depthConfigurations,
112             StreamConfigurationDuration[] depthMinFrameDurations,
113             StreamConfigurationDuration[] depthStallDurations,
114             StreamConfiguration[] dynamicDepthConfigurations,
115             StreamConfigurationDuration[] dynamicDepthMinFrameDurations,
116             StreamConfigurationDuration[] dynamicDepthStallDurations,
117             StreamConfiguration[] heicConfigurations,
118             StreamConfigurationDuration[] heicMinFrameDurations,
119             StreamConfigurationDuration[] heicStallDurations,
120             HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
121             ReprocessFormatsMap inputOutputFormatsMap,
122             boolean listHighResolution) {
123         this(configurations, minFrameDurations, stallDurations,
124                     depthConfigurations, depthMinFrameDurations, depthStallDurations,
125                     dynamicDepthConfigurations, dynamicDepthMinFrameDurations,
126                     dynamicDepthStallDurations,
127                     heicConfigurations, heicMinFrameDurations, heicStallDurations,
128                     highSpeedVideoConfigurations, inputOutputFormatsMap, listHighResolution,
129                     /*enforceImplementationDefined*/ true);
130     }
131 
132     /**
133      * Create a new {@link StreamConfigurationMap}.
134      *
135      * <p>The array parameters ownership is passed to this object after creation; do not
136      * write to them after this constructor is invoked.</p>
137      *
138      * @param configurations a non-{@code null} array of {@link StreamConfiguration}
139      * @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration}
140      * @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
141      * @param depthConfigurations a non-{@code null} array of depth {@link StreamConfiguration}
142      * @param depthMinFrameDurations a non-{@code null} array of depth
143      *        {@link StreamConfigurationDuration}
144      * @param depthStallDurations a non-{@code null} array of depth
145      *        {@link StreamConfigurationDuration}
146      * @param dynamicDepthConfigurations a non-{@code null} array of dynamic depth
147      *        {@link StreamConfiguration}
148      * @param dynamicDepthMinFrameDurations a non-{@code null} array of dynamic depth
149      *        {@link StreamConfigurationDuration}
150      * @param dynamicDepthStallDurations a non-{@code null} array of dynamic depth
151      *        {@link StreamConfigurationDuration}
152      * @param heicConfigurations a non-{@code null} array of heic {@link StreamConfiguration}
153      * @param heicMinFrameDurations a non-{@code null} array of heic
154      *        {@link StreamConfigurationDuration}
155      * @param heicStallDurations a non-{@code null} array of heic
156      *        {@link StreamConfigurationDuration}
157      * @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration}, null if
158      *        camera device does not support high speed video recording
159      * @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE
160      *        and thus needs a separate list of slow high-resolution output sizes
161      * @param enforceImplementationDefined a flag indicating whether
162      *        IMPLEMENTATION_DEFINED format configuration must be present
163      * @throws NullPointerException if any of the arguments except highSpeedVideoConfigurations
164      *         were {@code null} or any subelements were {@code null}
165      *
166      * @hide
167      */
    public StreamConfigurationMap(
            StreamConfiguration[] configurations,
            StreamConfigurationDuration[] minFrameDurations,
            StreamConfigurationDuration[] stallDurations,
            StreamConfiguration[] depthConfigurations,
            StreamConfigurationDuration[] depthMinFrameDurations,
            StreamConfigurationDuration[] depthStallDurations,
            StreamConfiguration[] dynamicDepthConfigurations,
            StreamConfigurationDuration[] dynamicDepthMinFrameDurations,
            StreamConfigurationDuration[] dynamicDepthStallDurations,
            StreamConfiguration[] heicConfigurations,
            StreamConfigurationDuration[] heicMinFrameDurations,
            StreamConfigurationDuration[] heicStallDurations,
            HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
            ReprocessFormatsMap inputOutputFormatsMap,
            boolean listHighResolution,
            boolean enforceImplementationDefined) {

        // At least one of the three configuration families (color, depth, heic) must be
        // present; each family that is absent is replaced with empty arrays below so that
        // the counting loops further down become no-ops.
        if (configurations == null &&
                depthConfigurations == null &&
                heicConfigurations == null) {
            throw new NullPointerException("At least one of color/depth/heic configurations " +
                    "must not be null");
        }

        if (configurations == null) {
            // If no color configurations exist, ensure depth ones do
            mConfigurations = new StreamConfiguration[0];
            mMinFrameDurations = new StreamConfigurationDuration[0];
            mStallDurations = new StreamConfigurationDuration[0];
        } else {
            // Reject null array elements up front; per the Javadoc, array ownership passes
            // to this object after construction.
            mConfigurations = checkArrayElementsNotNull(configurations, "configurations");
            mMinFrameDurations = checkArrayElementsNotNull(minFrameDurations, "minFrameDurations");
            mStallDurations = checkArrayElementsNotNull(stallDurations, "stallDurations");
        }

        mListHighResolution = listHighResolution;

        if (depthConfigurations == null) {
            mDepthConfigurations = new StreamConfiguration[0];
            mDepthMinFrameDurations = new StreamConfigurationDuration[0];
            mDepthStallDurations = new StreamConfigurationDuration[0];
        } else {
            mDepthConfigurations = checkArrayElementsNotNull(depthConfigurations,
                    "depthConfigurations");
            mDepthMinFrameDurations = checkArrayElementsNotNull(depthMinFrameDurations,
                    "depthMinFrameDurations");
            mDepthStallDurations = checkArrayElementsNotNull(depthStallDurations,
                    "depthStallDurations");
        }

        if (dynamicDepthConfigurations == null) {
            mDynamicDepthConfigurations = new StreamConfiguration[0];
            mDynamicDepthMinFrameDurations = new StreamConfigurationDuration[0];
            mDynamicDepthStallDurations = new StreamConfigurationDuration[0];
        } else {
            mDynamicDepthConfigurations = checkArrayElementsNotNull(dynamicDepthConfigurations,
                    "dynamicDepthConfigurations");
            mDynamicDepthMinFrameDurations = checkArrayElementsNotNull(
                    dynamicDepthMinFrameDurations, "dynamicDepthMinFrameDurations");
            mDynamicDepthStallDurations = checkArrayElementsNotNull(dynamicDepthStallDurations,
                    "dynamicDepthStallDurations");
        }

        if (heicConfigurations == null) {
            mHeicConfigurations = new StreamConfiguration[0];
            mHeicMinFrameDurations = new StreamConfigurationDuration[0];
            mHeicStallDurations = new StreamConfigurationDuration[0];
        } else {
            mHeicConfigurations = checkArrayElementsNotNull(heicConfigurations,
                    "heicConfigurations");
            mHeicMinFrameDurations = checkArrayElementsNotNull(heicMinFrameDurations,
                    "heicMinFrameDurations");
            mHeicStallDurations = checkArrayElementsNotNull(heicStallDurations,
                    "heicStallDurations");
        }

        // highSpeedVideoConfigurations is the only argument that may legitimately be null
        // (device does not support high speed video recording).
        if (highSpeedVideoConfigurations == null) {
            mHighSpeedVideoConfigurations = new HighSpeedVideoConfiguration[0];
        } else {
            mHighSpeedVideoConfigurations = checkArrayElementsNotNull(
                    highSpeedVideoConfigurations, "highSpeedVideoConfigurations");
        }

        // For each format, track how many sizes there are available to configure
        for (StreamConfiguration config : mConfigurations) {
            int fmt = config.getFormat();
            SparseIntArray map = null;
            if (config.isOutput()) {
                mAllOutputFormats.put(fmt, mAllOutputFormats.get(fmt) + 1);
                long duration = 0;
                if (mListHighResolution) {
                    // Find this configuration's minimum frame duration (matched by
                    // format + exact size); outputs slower than 20fps are counted in the
                    // separate high-resolution table rather than the regular output table.
                    for (StreamConfigurationDuration configurationDuration : mMinFrameDurations) {
                        if (configurationDuration.getFormat() == fmt &&
                                configurationDuration.getWidth() == config.getSize().getWidth() &&
                                configurationDuration.getHeight() == config.getSize().getHeight()) {
                            duration = configurationDuration.getDuration();
                            break;
                        }
                    }
                }
                map = duration <= DURATION_20FPS_NS ?
                        mOutputFormats : mHighResOutputFormats;
            } else {
                map = mInputFormats;
            }
            map.put(fmt, map.get(fmt) + 1);
        }

        // For each depth format, track how many sizes there are available to configure
        for (StreamConfiguration config : mDepthConfigurations) {
            if (!config.isOutput()) {
                // Ignoring input depth configs
                continue;
            }

            mDepthOutputFormats.put(config.getFormat(),
                    mDepthOutputFormats.get(config.getFormat()) + 1);
        }
        // Same counting for dynamic depth formats (output direction only).
        for (StreamConfiguration config : mDynamicDepthConfigurations) {
            if (!config.isOutput()) {
                // Ignoring input configs
                continue;
            }

            mDynamicDepthOutputFormats.put(config.getFormat(),
                    mDynamicDepthOutputFormats.get(config.getFormat()) + 1);
        }

        // For each heic format, track how many sizes there are available to configure
        for (StreamConfiguration config : mHeicConfigurations) {
            if (!config.isOutput()) {
                // Ignoring input heic configs
                continue;
            }

            mHeicOutputFormats.put(config.getFormat(),
                    mHeicOutputFormats.get(config.getFormat()) + 1);
        }

        // When color configurations were provided and the caller asked for enforcement,
        // an IMPLEMENTATION_DEFINED output stream must have been listed by the HAL.
        if (configurations != null && enforceImplementationDefined &&
                mOutputFormats.indexOfKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) < 0) {
            throw new AssertionError(
                    "At least one stream configuration for IMPLEMENTATION_DEFINED must exist");
        }

        // For each Size/FPS range, track how many FPS range/Size there are available
        for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
            Size size = config.getSize();
            Range<Integer> fpsRange = config.getFpsRange();
            Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
            if (fpsRangeCount == null) {
                fpsRangeCount = 0;
            }
            mHighSpeedVideoSizeMap.put(size, fpsRangeCount + 1);
            Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
            if (sizeCount == null) {
                sizeCount = 0;
            }
            mHighSpeedVideoFpsRangeMap.put(fpsRange, sizeCount + 1);
        }

        mInputOutputFormatsMap = inputOutputFormatsMap;
    }
332 
333     /**
334      * Get the image {@code format} output formats in this stream configuration.
335      *
336      * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
337      * or in {@link PixelFormat} (and there is no possibility of collision).</p>
338      *
339      * <p>Formats listed in this array are guaranteed to return true if queried with
340      * {@link #isOutputSupportedFor(int)}.</p>
341      *
342      * @return an array of integer format
343      *
344      * @see ImageFormat
345      * @see PixelFormat
346      */
getOutputFormats()347     public int[] getOutputFormats() {
348         return getPublicFormats(/*output*/true);
349     }
350 
351     /**
352      * Get the image {@code format} output formats for a reprocessing input format.
353      *
354      * <p>When submitting a {@link CaptureRequest} with an input Surface of a given format,
355      * the only allowed target outputs of the {@link CaptureRequest} are the ones with a format
356      * listed in the return value of this method. Including any other output Surface as a target
357      * will throw an IllegalArgumentException. If no output format is supported given the input
358      * format, an empty int[] will be returned.</p>
359      *
360      * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
361      * or in {@link PixelFormat} (and there is no possibility of collision).</p>
362      *
363      * <p>Formats listed in this array are guaranteed to return true if queried with
364      * {@link #isOutputSupportedFor(int)}.</p>
365      *
366      * @return an array of integer format
367      *
368      * @see ImageFormat
369      * @see PixelFormat
370      */
getValidOutputFormatsForInput(int inputFormat)371     public int[] getValidOutputFormatsForInput(int inputFormat) {
372         if (mInputOutputFormatsMap == null) {
373             return new int[0];
374         }
375 
376         int[] outputs = mInputOutputFormatsMap.getOutputs(inputFormat);
377         if (mHeicOutputFormats.size() > 0) {
378             // All reprocessing formats map contain JPEG.
379             int[] outputsWithHeic = Arrays.copyOf(outputs, outputs.length+1);
380             outputsWithHeic[outputs.length] = ImageFormat.HEIC;
381             return outputsWithHeic;
382         } else {
383             return outputs;
384         }
385     }
386 
387     /**
388      * Get the image {@code format} input formats in this stream configuration.
389      *
390      * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
391      * or in {@link PixelFormat} (and there is no possibility of collision).</p>
392      *
393      * @return an array of integer format
394      *
395      * @see ImageFormat
396      * @see PixelFormat
397      */
getInputFormats()398     public int[] getInputFormats() {
399         return getPublicFormats(/*output*/false);
400     }
401 
402     /**
403      * Get the supported input sizes for this input format.
404      *
405      * <p>The format must have come from {@link #getInputFormats}; otherwise
406      * {@code null} is returned.</p>
407      *
408      * @param format a format from {@link #getInputFormats}
409      * @return a non-empty array of sizes, or {@code null} if the format was not available.
410      */
getInputSizes(final int format)411     public Size[] getInputSizes(final int format) {
412         return getPublicFormatSizes(format, /*output*/false, /*highRes*/false);
413     }
414 
415     /**
416      * Determine whether or not output surfaces with a particular user-defined format can be passed
417      * {@link CameraDevice#createCaptureSession createCaptureSession}.
418      *
419      * <p>This method determines that the output {@code format} is supported by the camera device;
420      * each output {@code surface} target may or may not itself support that {@code format}.
421      * Refer to the class which provides the surface for additional documentation.</p>
422      *
423      * <p>Formats for which this returns {@code true} are guaranteed to exist in the result
424      * returned by {@link #getOutputSizes}.</p>
425      *
426      * @param format an image format from either {@link ImageFormat} or {@link PixelFormat}
427      * @return
428      *          {@code true} iff using a {@code surface} with this {@code format} will be
429      *          supported with {@link CameraDevice#createCaptureSession}
430      *
431      * @throws IllegalArgumentException
432      *          if the image format was not a defined named constant
433      *          from either {@link ImageFormat} or {@link PixelFormat}
434      *
435      * @see ImageFormat
436      * @see PixelFormat
437      * @see CameraDevice#createCaptureSession
438      */
isOutputSupportedFor(int format)439     public boolean isOutputSupportedFor(int format) {
440         checkArgumentFormat(format);
441 
442         int internalFormat = imageFormatToInternal(format);
443         int dataspace = imageFormatToDataspace(format);
444         if (dataspace == HAL_DATASPACE_DEPTH) {
445             return mDepthOutputFormats.indexOfKey(internalFormat) >= 0;
446         } else if (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) {
447             return mDynamicDepthOutputFormats.indexOfKey(internalFormat) >= 0;
448         } else if (dataspace == HAL_DATASPACE_HEIF) {
449             return mHeicOutputFormats.indexOfKey(internalFormat) >= 0;
450         } else {
451             return getFormatsMap(/*output*/true).indexOfKey(internalFormat) >= 0;
452         }
453     }
454 
455     /**
456      * Determine whether or not output streams can be configured with a particular class
457      * as a consumer.
458      *
459      * <p>The following list is generally usable for outputs:
460      * <ul>
461      * <li>{@link android.media.ImageReader} -
462      * Recommended for image processing or streaming to external resources (such as a file or
463      * network)
464      * <li>{@link android.media.MediaRecorder} -
465      * Recommended for recording video (simple to use)
466      * <li>{@link android.media.MediaCodec} -
467      * Recommended for recording video (more complicated to use, with more flexibility)
468      * <li>{@link android.renderscript.Allocation} -
469      * Recommended for image processing with {@link android.renderscript RenderScript}
470      * <li>{@link android.view.SurfaceHolder} -
471      * Recommended for low-power camera preview with {@link android.view.SurfaceView}
472      * <li>{@link android.graphics.SurfaceTexture} -
473      * Recommended for OpenGL-accelerated preview processing or compositing with
474      * {@link android.view.TextureView}
475      * </ul>
476      * </p>
477      *
478      * <p>Generally speaking this means that creating a {@link Surface} from that class <i>may</i>
479      * provide a producer endpoint that is suitable to be used with
480      * {@link CameraDevice#createCaptureSession}.</p>
481      *
482      * <p>Since not all of the above classes support output of all format and size combinations,
483      * the particular combination should be queried with {@link #isOutputSupportedFor(Surface)}.</p>
484      *
485      * @param klass a non-{@code null} {@link Class} object reference
486      * @return {@code true} if this class is supported as an output, {@code false} otherwise
487      *
488      * @throws NullPointerException if {@code klass} was {@code null}
489      *
490      * @see CameraDevice#createCaptureSession
491      * @see #isOutputSupportedFor(Surface)
492      */
isOutputSupportedFor(Class<T> klass)493     public static <T> boolean isOutputSupportedFor(Class<T> klass) {
494         Objects.requireNonNull(klass, "klass must not be null");
495 
496         if (klass == android.media.ImageReader.class) {
497             return true;
498         } else if (klass == android.media.MediaRecorder.class) {
499             return true;
500         } else if (klass == android.media.MediaCodec.class) {
501             return true;
502         } else if (klass == android.renderscript.Allocation.class) {
503             return true;
504         } else if (klass == android.view.SurfaceHolder.class) {
505             return true;
506         } else if (klass == android.graphics.SurfaceTexture.class) {
507             return true;
508         }
509 
510         return false;
511     }
512 
513     /**
514      * Determine whether or not the {@code surface} in its current state is suitable to be included
515      * in a {@link CameraDevice#createCaptureSession capture session} as an output.
516      *
517      * <p>Not all surfaces are usable with the {@link CameraDevice}, and not all configurations
518      * of that {@code surface} are compatible. Some classes that provide the {@code surface} are
519      * compatible with the {@link CameraDevice} in general
     * (see {@link #isOutputSupportedFor(Class)}), but it is the caller's responsibility to put the
521      * {@code surface} into a state that will be compatible with the {@link CameraDevice}.</p>
522      *
523      * <p>Reasons for a {@code surface} being specifically incompatible might be:
524      * <ul>
525      * <li>Using a format that's not listed by {@link #getOutputFormats}
526      * <li>Using a format/size combination that's not listed by {@link #getOutputSizes}
     * <li>The {@code surface} itself is not in a state where it can service a new producer.
     * </li>
529      * </ul>
530      *
531      * <p>Surfaces from flexible sources will return true even if the exact size of the Surface does
532      * not match a camera-supported size, as long as the format (or class) is supported and the
     * camera device supports a size that is equal to or less than 1080p in that format. If such a
534      * Surface is used to create a capture session, it will have its size rounded to the nearest
535      * supported size, below or equal to 1080p. Flexible sources include SurfaceView, SurfaceTexture,
536      * and ImageReader.</p>
537      *
538      * <p>This is not an exhaustive list; see the particular class's documentation for further
539      * possible reasons of incompatibility.</p>
540      *
541      * @param surface a non-{@code null} {@link Surface} object reference
542      * @return {@code true} if this is supported, {@code false} otherwise
543      *
544      * @throws NullPointerException if {@code surface} was {@code null}
545      * @throws IllegalArgumentException if the Surface endpoint is no longer valid
546      *
547      * @see CameraDevice#createCaptureSession
548      * @see #isOutputSupportedFor(Class)
549      */
isOutputSupportedFor(Surface surface)550     public boolean isOutputSupportedFor(Surface surface) {
551         Objects.requireNonNull(surface, "surface must not be null");
552 
553         Size surfaceSize = SurfaceUtils.getSurfaceSize(surface);
554         int surfaceFormat = SurfaceUtils.getSurfaceFormat(surface);
555         int surfaceDataspace = SurfaceUtils.getSurfaceDataspace(surface);
556 
557         // See if consumer is flexible.
558         boolean isFlexible = SurfaceUtils.isFlexibleConsumer(surface);
559 
560         StreamConfiguration[] configs =
561                 surfaceDataspace == HAL_DATASPACE_DEPTH ? mDepthConfigurations :
562                 surfaceDataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthConfigurations :
563                 surfaceDataspace == HAL_DATASPACE_HEIF ? mHeicConfigurations :
564                 mConfigurations;
565         for (StreamConfiguration config : configs) {
566             if (config.getFormat() == surfaceFormat && config.isOutput()) {
567                 // Matching format, either need exact size match, or a flexible consumer
568                 // and a size no bigger than MAX_DIMEN_FOR_ROUNDING
569                 if (config.getSize().equals(surfaceSize)) {
570                     return true;
571                 } else if (isFlexible &&
572                         (config.getSize().getWidth() <= MAX_DIMEN_FOR_ROUNDING)) {
573                     return true;
574                 }
575             }
576         }
577         return false;
578     }
579 
580     /**
581      * Determine whether or not the particular stream configuration is suitable to be included
582      * in a {@link CameraDevice#createCaptureSession capture session} as an output.
583      *
584      * @param size stream configuration size
585      * @param format stream configuration format
586      * @return {@code true} if this is supported, {@code false} otherwise
587      *
588      * @see CameraDevice#createCaptureSession
589      * @see #isOutputSupportedFor(Class)
590      * @hide
591      */
isOutputSupportedFor(Size size, int format)592     public boolean isOutputSupportedFor(Size size, int format) {
593         int internalFormat = imageFormatToInternal(format);
594         int dataspace = imageFormatToDataspace(format);
595 
596         StreamConfiguration[] configs =
597                 dataspace == HAL_DATASPACE_DEPTH ? mDepthConfigurations :
598                 dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthConfigurations :
599                 dataspace == HAL_DATASPACE_HEIF ? mHeicConfigurations :
600                 mConfigurations;
601         for (StreamConfiguration config : configs) {
602             if ((config.getFormat() == internalFormat) && config.isOutput() &&
603                     config.getSize().equals(size)) {
604                 return true;
605             }
606         }
607 
608         return false;
609     }
610 
611     /**
612      * Get a list of sizes compatible with {@code klass} to use as an output.
613      *
614      * <p>Some of the supported classes may support additional formats beyond
615      * {@link ImageFormat#PRIVATE}; this function only returns
616      * sizes for {@link ImageFormat#PRIVATE}. For example, {@link android.media.ImageReader}
617      * supports {@link ImageFormat#YUV_420_888} and {@link ImageFormat#PRIVATE}, this method will
618      * only return the sizes for {@link ImageFormat#PRIVATE} for {@link android.media.ImageReader}
619      * class.</p>
620      *
621      * <p>If a well-defined format such as {@code NV21} is required, use
622      * {@link #getOutputSizes(int)} instead.</p>
623      *
     * <p>The {@code klass} should be a supported output, such that querying
     * {@code #isOutputSupportedFor(Class)} returns {@code true}.</p>
626      *
627      * @param klass
628      *          a non-{@code null} {@link Class} object reference
629      * @return
630      *          an array of supported sizes for {@link ImageFormat#PRIVATE} format,
631      *          or {@code null} iff the {@code klass} is not a supported output.
632      *
633      *
634      * @throws NullPointerException if {@code klass} was {@code null}
635      *
636      * @see #isOutputSupportedFor(Class)
637      */
getOutputSizes(Class<T> klass)638     public <T> Size[] getOutputSizes(Class<T> klass) {
639         if (isOutputSupportedFor(klass) == false) {
640             return null;
641         }
642 
643         return getInternalFormatSizes(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
644                 HAL_DATASPACE_UNKNOWN,/*output*/true, /*highRes*/false);
645     }
646 
647     /**
648      * Get a list of sizes compatible with the requested image {@code format}.
649      *
650      * <p>The {@code format} should be a supported format (one of the formats returned by
651      * {@link #getOutputFormats}).</p>
652      *
653      * As of API level 23, the {@link #getHighResolutionOutputSizes} method can be used on devices
654      * that support the
655      * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}
656      * capability to get a list of high-resolution output sizes that cannot operate at the preferred
657      * 20fps rate. This means that for some supported formats, this method will return an empty
658      * list, if all the supported resolutions operate at below 20fps.  For devices that do not
659      * support the BURST_CAPTURE capability, all output resolutions are listed through this method.
660      *
661      * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
662      * @return
663      *          an array of supported sizes,
664      *          or {@code null} if the {@code format} is not a supported output
665      *
666      * @see ImageFormat
667      * @see PixelFormat
668      * @see #getOutputFormats
669      */
getOutputSizes(int format)670     public Size[] getOutputSizes(int format) {
671         return getPublicFormatSizes(format, /*output*/true, /*highRes*/ false);
672     }
673 
674     /**
675      * Get a list of supported high speed video recording sizes.
676      * <p>
677      * When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is
678      * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will
679      * list the supported high speed video size configurations. All the sizes listed will be a
680      * subset of the sizes reported by {@link #getOutputSizes} for processed non-stalling formats
681      * (typically {@link ImageFormat#PRIVATE} {@link ImageFormat#YUV_420_888}, etc.)
682      * </p>
683      * <p>
     * To enable high speed video recording, application must create a constrained high speed
     * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit
686      * a CaptureRequest list created by
687      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
688      * to this session. The application must select the video size from this method and
689      * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
690      * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and
691      * generate the high speed request list. For example, if the application intends to do high
692      * speed recording, it can select the maximum size reported by this method to create high speed
693      * capture session. Note that for the use case of multiple output streams, application must
694      * select one unique size from this method to use (e.g., preview and recording streams must have
695      * the same size). Otherwise, the high speed session creation will fail. Once the size is
696      * selected, application can get the supported FPS ranges by
697      * {@link #getHighSpeedVideoFpsRangesFor}, and use these FPS ranges to setup the recording
698      * request lists via
699      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
700      * </p>
701      *
702      * @return an array of supported high speed video recording sizes
703      * @see #getHighSpeedVideoFpsRangesFor(Size)
704      * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
705      * @see CameraDevice#createConstrainedHighSpeedCaptureSession
706      * @see android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList
707      */
getHighSpeedVideoSizes()708     public Size[] getHighSpeedVideoSizes() {
709         Set<Size> keySet = mHighSpeedVideoSizeMap.keySet();
710         return keySet.toArray(new Size[keySet.size()]);
711     }
712 
713     /**
714      * Get the frame per second ranges (fpsMin, fpsMax) for input high speed video size.
715      * <p>
716      * See {@link #getHighSpeedVideoFpsRanges} for how to enable high speed recording.
717      * </p>
718      * <p>
719      * The {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS ranges} reported in this method
720      * must not be used to setup capture requests that are submitted to unconstrained capture
721      * sessions, or it will result in {@link IllegalArgumentException IllegalArgumentExceptions}.
722      * </p>
723      * <p>
724      * See {@link #getHighSpeedVideoFpsRanges} for the characteristics of the returned FPS ranges.
725      * </p>
726      *
727      * @param size one of the sizes returned by {@link #getHighSpeedVideoSizes()}
728      * @return an array of supported high speed video recording FPS ranges The upper bound of
729      *         returned ranges is guaranteed to be greater than or equal to 120.
730      * @throws IllegalArgumentException if input size does not exist in the return value of
731      *             getHighSpeedVideoSizes
732      * @see #getHighSpeedVideoSizes()
733      * @see #getHighSpeedVideoFpsRanges()
734      */
getHighSpeedVideoFpsRangesFor(Size size)735     public Range<Integer>[] getHighSpeedVideoFpsRangesFor(Size size) {
736         Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
737         if (fpsRangeCount == null || fpsRangeCount == 0) {
738             throw new IllegalArgumentException(String.format(
739                     "Size %s does not support high speed video recording", size));
740         }
741 
742         @SuppressWarnings("unchecked")
743         Range<Integer>[] fpsRanges = new Range[fpsRangeCount];
744         int i = 0;
745         for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
746             if (size.equals(config.getSize())) {
747                 fpsRanges[i++] = config.getFpsRange();
748             }
749         }
750         return fpsRanges;
751     }
752 
753     /**
754      * Get a list of supported high speed video recording FPS ranges.
755      * <p>
756      * When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is
757      * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will
758      * list the supported high speed video FPS range configurations. Application can then use
759      * {@link #getHighSpeedVideoSizesFor} to query available sizes for one of returned FPS range.
760      * </p>
761      * <p>
     * To enable high speed video recording, application must create a constrained high speed
     * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit
764      * a CaptureRequest list created by
765      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
766      * to this session. The application must select the video size from this method and
767      * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
768      * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and
769      * generate the high speed request list. For example, if the application intends to do high
770      * speed recording, it can select one FPS range reported by this method, query the video sizes
771      * corresponding to this FPS range by {@link #getHighSpeedVideoSizesFor} and use one of reported
772      * sizes to create a high speed capture session. Note that for the use case of multiple output
773      * streams, application must select one unique size from this method to use (e.g., preview and
774      * recording streams must have the same size). Otherwise, the high speed session creation will
775      * fail. Once the high speed capture session is created, the application can set the FPS range
776      * in the recording request lists via
777      * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
778      * </p>
779      * <p>
780      * The FPS ranges reported by this method will have below characteristics:
     * <li>The fpsMin and fpsMax will be a multiple of 30fps.</li>
782      * <li>The fpsMin will be no less than 30fps, the fpsMax will be no less than 120fps.</li>
783      * <li>At least one range will be a fixed FPS range where fpsMin == fpsMax.</li>
784      * <li>For each fixed FPS range, there will be one corresponding variable FPS range [30,
785      * fps_max]. These kinds of FPS ranges are suitable for preview-only use cases where the
786      * application doesn't want the camera device always produce higher frame rate than the display
787      * refresh rate.</li>
788      * </p>
789      *
790      * @return an array of supported high speed video recording FPS ranges The upper bound of
791      *         returned ranges is guaranteed to be larger or equal to 120.
792      * @see #getHighSpeedVideoSizesFor
793      * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
794      * @see CameraDevice#createConstrainedHighSpeedCaptureSession
795      * @see android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList
796      */
797     @SuppressWarnings("unchecked")
getHighSpeedVideoFpsRanges()798     public Range<Integer>[] getHighSpeedVideoFpsRanges() {
799         Set<Range<Integer>> keySet = mHighSpeedVideoFpsRangeMap.keySet();
800         return keySet.toArray(new Range[keySet.size()]);
801     }
802 
803     /**
804      * Get the supported video sizes for an input high speed FPS range.
805      *
806      * <p> See {@link #getHighSpeedVideoSizes} for how to enable high speed recording.</p>
807      *
808      * @param fpsRange one of the FPS range returned by {@link #getHighSpeedVideoFpsRanges()}
809      * @return An array of video sizes to create high speed capture sessions for high speed streaming
810      *         use cases.
811      *
812      * @throws IllegalArgumentException if input FPS range does not exist in the return value of
813      *         getHighSpeedVideoFpsRanges
814      * @see #getHighSpeedVideoFpsRanges()
815      */
getHighSpeedVideoSizesFor(Range<Integer> fpsRange)816     public Size[] getHighSpeedVideoSizesFor(Range<Integer> fpsRange) {
817         Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
818         if (sizeCount == null || sizeCount == 0) {
819             throw new IllegalArgumentException(String.format(
820                     "FpsRange %s does not support high speed video recording", fpsRange));
821         }
822 
823         Size[] sizes = new Size[sizeCount];
824         int i = 0;
825         for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
826             if (fpsRange.equals(config.getFpsRange())) {
827                 sizes[i++] = config.getSize();
828             }
829         }
830         return sizes;
831     }
832 
833     /**
834      * Get a list of supported high resolution sizes, which cannot operate at full BURST_CAPTURE
835      * rate.
836      *
837      * <p>This includes all output sizes that cannot meet the 20 fps frame rate requirements for the
838      * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}
839      * capability.  This does not include the stall duration, so for example, a JPEG or RAW16 output
840      * resolution with a large stall duration but a minimum frame duration that's above 20 fps will
841      * still be listed in the regular {@link #getOutputSizes} list. All the sizes on this list that
842      * are less than 24 megapixels are still guaranteed to operate at a rate of at least 10 fps,
843      * not including stall duration. Sizes on this list that are at least 24 megapixels are allowed
844      * to operate at less than 10 fps.</p>
845      *
846      * <p>For a device that does not support the BURST_CAPTURE capability, this list will be
847      * {@code null}, since resolutions in the {@link #getOutputSizes} list are already not
848      * guaranteed to meet &gt;= 20 fps rate requirements. For a device that does support the
849      * BURST_CAPTURE capability, this list may be empty, if all supported resolutions meet the 20
850      * fps requirement.</p>
851      *
852      * @return an array of supported slower high-resolution sizes, or {@code null} if the
853      *         BURST_CAPTURE capability is not supported
854      */
getHighResolutionOutputSizes(int format)855     public Size[] getHighResolutionOutputSizes(int format) {
856         if (!mListHighResolution) return null;
857 
858         return getPublicFormatSizes(format, /*output*/true, /*highRes*/ true);
859     }
860 
861     /**
862      * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
863      * for the format/size combination (in nanoseconds).
864      *
865      * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
866      * <p>{@code size} should be one of the ones returned by
867      * {@link #getOutputSizes(int)}.</p>
868      *
869      * <p>This corresponds to the minimum frame duration (maximum frame rate) possible when only
870      * that stream is configured in a session, with all processing (typically in
871      * {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.  </p>
872      *
873      * <p>When multiple streams are used in a session, the minimum frame duration will be
874      * {@code max(individual stream min durations)}.  See {@link #getOutputStallDuration} for
875      * details of timing for formats that may cause frame rate slowdown when they are targeted by a
876      * capture request.</p>
877      *
878      * <p>For devices that do not support manual sensor control
879      * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
880      * this function may return 0.</p>
881      *
882      * <p>The minimum frame duration of a stream (of a particular format, size) is the same
883      * regardless of whether the stream is input or output.</p>
884      *
885      * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
886      * @param size an output-compatible size
887      * @return a minimum frame duration {@code >} 0 in nanoseconds, or
888      *          0 if the minimum frame duration is not available.
889      *
890      * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
891      * @throws NullPointerException if {@code size} was {@code null}
892      *
893      * @see CaptureRequest#SENSOR_FRAME_DURATION
894      * @see #getOutputStallDuration(int, Size)
895      * @see ImageFormat
896      * @see PixelFormat
897      */
getOutputMinFrameDuration(int format, Size size)898     public long getOutputMinFrameDuration(int format, Size size) {
899         Objects.requireNonNull(size, "size must not be null");
900         checkArgumentFormatSupported(format, /*output*/true);
901 
902         return getInternalFormatDuration(imageFormatToInternal(format),
903                 imageFormatToDataspace(format),
904                 size,
905                 DURATION_MIN_FRAME);
906     }
907 
908     /**
909      * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
910      * for the class/size combination (in nanoseconds).
911      *
912      * <p>This assumes that the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
913      * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.</p>
914      *
915      * <p>{@code klass} should be one of the ones which is supported by
916      * {@link #isOutputSupportedFor(Class)}.</p>
917      *
918      * <p>{@code size} should be one of the ones returned by
919      * {@link #getOutputSizes(int)}.</p>
920      *
921      * <p>This corresponds to the minimum frame duration (maximum frame rate) possible when only
922      * that stream is configured in a session, with all processing (typically in
923      * {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.  </p>
924      *
925      * <p>When multiple streams are used in a session, the minimum frame duration will be
926      * {@code max(individual stream min durations)}.  See {@link #getOutputStallDuration} for
927      * details of timing for formats that may cause frame rate slowdown when they are targeted by a
928      * capture request.</p>
929      *
930      * <p>For devices that do not support manual sensor control
931      * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
932      * this function may return 0.</p>
933      *
934      * <p>The minimum frame duration of a stream (of a particular format, size) is the same
935      * regardless of whether the stream is input or output.</p>
936      *
937      * @param klass
938      *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
939      *          non-empty array returned by {@link #getOutputSizes(Class)}
940      * @param size an output-compatible size
941      * @return a minimum frame duration {@code >} 0 in nanoseconds, or
942      *          0 if the minimum frame duration is not available.
943      *
944      * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
945      * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
946      *
947      * @see CaptureRequest#SENSOR_FRAME_DURATION
948      * @see ImageFormat
949      * @see PixelFormat
950      */
getOutputMinFrameDuration(final Class<T> klass, final Size size)951     public <T> long getOutputMinFrameDuration(final Class<T> klass, final Size size) {
952         if (!isOutputSupportedFor(klass)) {
953             throw new IllegalArgumentException("klass was not supported");
954         }
955 
956         return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
957                 HAL_DATASPACE_UNKNOWN,
958                 size, DURATION_MIN_FRAME);
959     }
960 
961     /**
962      * Get the stall duration for the format/size combination (in nanoseconds).
963      *
964      * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
965      * <p>{@code size} should be one of the ones returned by
966      * {@link #getOutputSizes(int)}.</p>
967      *
968      * <p>
969      * A stall duration is how much extra time would get added to the normal minimum frame duration
970      * for a repeating request that has streams with non-zero stall.
971      *
972      * <p>For example, consider JPEG captures which have the following characteristics:
973      *
974      * <ul>
975      * <li>JPEG streams act like processed YUV streams in requests for which they are not included;
976      * in requests in which they are directly referenced, they act as JPEG streams.
977      * This is because supporting a JPEG stream requires the underlying YUV data to always be ready
978      * for use by a JPEG encoder, but the encoder will only be used (and impact frame duration) on
979      * requests that actually reference a JPEG stream.
980      * <li>The JPEG processor can run concurrently to the rest of the camera pipeline, but cannot
981      * process more than 1 capture at a time.
982      * </ul>
983      *
984      * <p>In other words, using a repeating YUV request would result in a steady frame rate
985      * (let's say it's 30 FPS). If a single JPEG request is submitted periodically,
986      * the frame rate will stay at 30 FPS (as long as we wait for the previous JPEG to return each
987      * time). If we try to submit a repeating YUV + JPEG request, then the frame rate will drop from
988      * 30 FPS.</p>
989      *
990      * <p>In general, submitting a new request with a non-0 stall time stream will <em>not</em> cause a
991      * frame rate drop unless there are still outstanding buffers for that stream from previous
992      * requests.</p>
993      *
994      * <p>Submitting a repeating request with streams (call this {@code S}) is the same as setting
995      * the minimum frame duration from the normal minimum frame duration corresponding to {@code S},
996      * added with the maximum stall duration for {@code S}.</p>
997      *
998      * <p>If interleaving requests with and without a stall duration, a request will stall by the
999      * maximum of the remaining times for each can-stall stream with outstanding buffers.</p>
1000      *
1001      * <p>This means that a stalling request will not have an exposure start until the stall has
1002      * completed.</p>
1003      *
1004      * <p>This should correspond to the stall duration when only that stream is active, with all
1005      * processing (typically in {@code android.*.mode}) set to {@code FAST} or {@code OFF}.
1006      * Setting any of the processing modes to {@code HIGH_QUALITY} effectively results in an
1007      * indeterminate stall duration for all streams in a request (the regular stall calculation
1008      * rules are ignored).</p>
1009      *
1010      * <p>The following formats may always have a stall duration:
1011      * <ul>
1012      * <li>{@link ImageFormat#JPEG JPEG}
1013      * <li>{@link ImageFormat#RAW_SENSOR RAW16}
1014      * <li>{@link ImageFormat#RAW_PRIVATE RAW_PRIVATE}
1015      * </ul>
1016      * </p>
1017      *
1018      * <p>The following formats will never have a stall duration:
1019      * <ul>
1020      * <li>{@link ImageFormat#YUV_420_888 YUV_420_888}
1021      * <li>{@link #isOutputSupportedFor(Class) Implementation-Defined}
1022      * </ul></p>
1023      *
1024      * <p>
1025      * All other formats may or may not have an allowed stall duration on a per-capability basis;
1026      * refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
1027      * android.request.availableCapabilities} for more details.</p>
1028      * </p>
1029      *
1030      * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}
1031      * for more information about calculating the max frame rate (absent stalls).</p>
1032      *
1033      * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
1034      * @param size an output-compatible size
1035      * @return a stall duration {@code >=} 0 in nanoseconds
1036      *
1037      * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
1038      * @throws NullPointerException if {@code size} was {@code null}
1039      *
1040      * @see CaptureRequest#SENSOR_FRAME_DURATION
1041      * @see ImageFormat
1042      * @see PixelFormat
1043      */
getOutputStallDuration(int format, Size size)1044     public long getOutputStallDuration(int format, Size size) {
1045         checkArgumentFormatSupported(format, /*output*/true);
1046 
1047         return getInternalFormatDuration(imageFormatToInternal(format),
1048                 imageFormatToDataspace(format),
1049                 size,
1050                 DURATION_STALL);
1051     }
1052 
1053     /**
1054      * Get the stall duration for the class/size combination (in nanoseconds).
1055      *
1056      * <p>This assumes that the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
     * For user-defined formats, use {@link #getOutputStallDuration(int, Size)}.</p>
1058      *
1059      * <p>{@code klass} should be one of the ones with a non-empty array returned by
1060      * {@link #getOutputSizes(Class)}.</p>
1061      *
1062      * <p>{@code size} should be one of the ones returned by
1063      * {@link #getOutputSizes(Class)}.</p>
1064      *
1065      * <p>See {@link #getOutputStallDuration(int, Size)} for a definition of a
1066      * <em>stall duration</em>.</p>
1067      *
1068      * @param klass
1069      *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
1070      *          non-empty array returned by {@link #getOutputSizes(Class)}
1071      * @param size an output-compatible size
     * @return a stall duration {@code >=} 0 in nanoseconds
1073      *
1074      * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
1075      * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
1076      *
1077      * @see CaptureRequest#SENSOR_FRAME_DURATION
1078      * @see ImageFormat
1079      * @see PixelFormat
1080      */
getOutputStallDuration(final Class<T> klass, final Size size)1081     public <T> long getOutputStallDuration(final Class<T> klass, final Size size) {
1082         if (!isOutputSupportedFor(klass)) {
1083             throw new IllegalArgumentException("klass was not supported");
1084         }
1085 
1086         return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1087                 HAL_DATASPACE_UNKNOWN, size, DURATION_STALL);
1088     }
1089 
1090     /**
1091      * Check if this {@link StreamConfigurationMap} is equal to another
1092      * {@link StreamConfigurationMap}.
1093      *
1094      * <p>Two vectors are only equal if and only if each of the respective elements is equal.</p>
1095      *
1096      * @return {@code true} if the objects were equal, {@code false} otherwise
1097      */
1098     @Override
equals(final Object obj)1099     public boolean equals(final Object obj) {
1100         if (obj == null) {
1101             return false;
1102         }
1103         if (this == obj) {
1104             return true;
1105         }
1106         if (obj instanceof StreamConfigurationMap) {
1107             final StreamConfigurationMap other = (StreamConfigurationMap) obj;
1108             // XX: do we care about order?
1109             return Arrays.equals(mConfigurations, other.mConfigurations) &&
1110                     Arrays.equals(mMinFrameDurations, other.mMinFrameDurations) &&
1111                     Arrays.equals(mStallDurations, other.mStallDurations) &&
1112                     Arrays.equals(mDepthConfigurations, other.mDepthConfigurations) &&
1113                     Arrays.equals(mDepthMinFrameDurations, other.mDepthMinFrameDurations) &&
1114                     Arrays.equals(mDepthStallDurations, other.mDepthStallDurations) &&
1115                     Arrays.equals(mDynamicDepthConfigurations, other.mDynamicDepthConfigurations) &&
1116                     Arrays.equals(mDynamicDepthMinFrameDurations,
1117                             other.mDynamicDepthMinFrameDurations) &&
1118                     Arrays.equals(mDynamicDepthStallDurations, other.mDynamicDepthStallDurations) &&
1119                     Arrays.equals(mHeicConfigurations, other.mHeicConfigurations) &&
1120                     Arrays.equals(mHeicMinFrameDurations, other.mHeicMinFrameDurations) &&
1121                     Arrays.equals(mHeicStallDurations, other.mHeicStallDurations) &&
1122                     Arrays.equals(mHighSpeedVideoConfigurations,
1123                             other.mHighSpeedVideoConfigurations);
1124         }
1125         return false;
1126     }
1127 
    /**
     * {@inheritDoc}
     */
    @Override
    public int hashCode() {
        // XX: do we care about order?
        // Hashes over the same member arrays compared in equals(), preserving
        // the contract that equal maps produce equal hash codes.
        return HashCodeHelpers.hashCodeGeneric(
                mConfigurations, mMinFrameDurations, mStallDurations,
                mDepthConfigurations, mDepthMinFrameDurations, mDepthStallDurations,
                mDynamicDepthConfigurations, mDynamicDepthMinFrameDurations,
                mDynamicDepthStallDurations, mHeicConfigurations,
                mHeicMinFrameDurations, mHeicStallDurations,
                mHighSpeedVideoConfigurations);
    }
1142 
1143     // Check that the argument is supported by #getOutputFormats or #getInputFormats
checkArgumentFormatSupported(int format, boolean output)1144     private int checkArgumentFormatSupported(int format, boolean output) {
1145         checkArgumentFormat(format);
1146 
1147         int internalFormat = imageFormatToInternal(format);
1148         int internalDataspace = imageFormatToDataspace(format);
1149 
1150         if (output) {
1151             if (internalDataspace == HAL_DATASPACE_DEPTH) {
1152                 if (mDepthOutputFormats.indexOfKey(internalFormat) >= 0) {
1153                     return format;
1154                 }
1155             } else if (internalDataspace == HAL_DATASPACE_DYNAMIC_DEPTH) {
1156                 if (mDynamicDepthOutputFormats.indexOfKey(internalFormat) >= 0) {
1157                     return format;
1158                 }
1159             } else if (internalDataspace == HAL_DATASPACE_HEIF) {
1160                 if (mHeicOutputFormats.indexOfKey(internalFormat) >= 0) {
1161                     return format;
1162                 }
1163             } else {
1164                 if (mAllOutputFormats.indexOfKey(internalFormat) >= 0) {
1165                     return format;
1166                 }
1167             }
1168         } else {
1169             if (mInputFormats.indexOfKey(internalFormat) >= 0) {
1170                 return format;
1171             }
1172         }
1173 
1174         throw new IllegalArgumentException(String.format(
1175                 "format %x is not supported by this stream configuration map", format));
1176     }
1177 
1178     /**
1179      * Ensures that the format is either user-defined or implementation defined.
1180      *
1181      * <p>If a format has a different internal representation than the public representation,
1182      * passing in the public representation here will fail.</p>
1183      *
1184      * <p>For example if trying to use {@link ImageFormat#JPEG}:
1185      * it has a different public representation than the internal representation
1186      * {@code HAL_PIXEL_FORMAT_BLOB}, this check will fail.</p>
1187      *
1188      * <p>Any invalid/undefined formats will raise an exception.</p>
1189      *
1190      * @param format image format
1191      * @return the format
1192      *
1193      * @throws IllegalArgumentException if the format was invalid
1194      */
checkArgumentFormatInternal(int format)1195     static int checkArgumentFormatInternal(int format) {
1196         switch (format) {
1197             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1198             case HAL_PIXEL_FORMAT_BLOB:
1199             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1200             case HAL_PIXEL_FORMAT_Y16:
1201                 return format;
1202             case ImageFormat.JPEG:
1203             case ImageFormat.HEIC:
1204                 throw new IllegalArgumentException(
1205                         "An unknown internal format: " + format);
1206             default:
1207                 return checkArgumentFormat(format);
1208         }
1209     }
1210 
1211     /**
1212      * Ensures that the format is publicly user-defined in either ImageFormat or PixelFormat.
1213      *
1214      * <p>If a format has a different public representation than the internal representation,
1215      * passing in the internal representation here will fail.</p>
1216      *
1217      * <p>For example if trying to use {@code HAL_PIXEL_FORMAT_BLOB}:
1218      * it has a different internal representation than the public representation
1219      * {@link ImageFormat#JPEG}, this check will fail.</p>
1220      *
1221      * <p>Any invalid/undefined formats will raise an exception, including implementation-defined.
1222      * </p>
1223      *
1224      * <p>Note that {@code @hide} and deprecated formats will not pass this check.</p>
1225      *
1226      * @param format image format
1227      * @return the format
1228      *
1229      * @throws IllegalArgumentException if the format was not user-defined
1230      */
checkArgumentFormat(int format)1231     static int checkArgumentFormat(int format) {
1232         if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) {
1233             throw new IllegalArgumentException(String.format(
1234                     "format 0x%x was not defined in either ImageFormat or PixelFormat", format));
1235         }
1236 
1237         return format;
1238     }
1239 
1240     /**
1241      * Convert an internal format compatible with {@code graphics.h} into public-visible
1242      * {@code ImageFormat}. This assumes the dataspace of the format is not HAL_DATASPACE_DEPTH.
1243      *
1244      * <p>In particular these formats are converted:
1245      * <ul>
1246      * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.JPEG</li>
1247      * </ul>
1248      * </p>
1249      *
1250      * <p>Passing in a format which has no public equivalent will fail;
1251      * as will passing in a public format which has a different internal format equivalent.
1252      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1253      *
1254      * <p>All other formats are returned as-is, no further invalid check is performed.</p>
1255      *
1256      * <p>This function is the dual of {@link #imageFormatToInternal} for dataspaces other than
1257      * HAL_DATASPACE_DEPTH.</p>
1258      *
1259      * @param format image format from {@link ImageFormat} or {@link PixelFormat}
1260      * @return the converted image formats
1261      *
1262      * @throws IllegalArgumentException
1263      *          if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
1264      *          {@link ImageFormat#JPEG}
1265      *
1266      * @see ImageFormat
1267      * @see PixelFormat
1268      * @see #checkArgumentFormat
1269      * @hide
1270      */
imageFormatToPublic(int format)1271     public static int imageFormatToPublic(int format) {
1272         switch (format) {
1273             case HAL_PIXEL_FORMAT_BLOB:
1274                 return ImageFormat.JPEG;
1275             case ImageFormat.JPEG:
1276                 throw new IllegalArgumentException(
1277                         "ImageFormat.JPEG is an unknown internal format");
1278             default:
1279                 return format;
1280         }
1281     }
1282 
1283     /**
1284      * Convert an internal format compatible with {@code graphics.h} into public-visible
1285      * {@code ImageFormat}. This assumes the dataspace of the format is HAL_DATASPACE_DEPTH.
1286      *
1287      * <p>In particular these formats are converted:
1288      * <ul>
1289      * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.DEPTH_POINT_CLOUD
1290      * <li>HAL_PIXEL_FORMAT_Y16 => ImageFormat.DEPTH16
1291      * </ul>
1292      * </p>
1293      *
1294      * <p>Passing in an implementation-defined format which has no public equivalent will fail;
1295      * as will passing in a public format which has a different internal format equivalent.
1296      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1297      *
1298      * <p>All other formats are returned as-is, no further invalid check is performed.</p>
1299      *
1300      * <p>This function is the dual of {@link #imageFormatToInternal} for formats associated with
1301      * HAL_DATASPACE_DEPTH.</p>
1302      *
1303      * @param format image format from {@link ImageFormat} or {@link PixelFormat}
1304      * @return the converted image formats
1305      *
1306      * @throws IllegalArgumentException
1307      *          if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
1308      *          {@link ImageFormat#JPEG}
1309      *
1310      * @see ImageFormat
1311      * @see PixelFormat
1312      * @see #checkArgumentFormat
1313      * @hide
1314      */
depthFormatToPublic(int format)1315     public static int depthFormatToPublic(int format) {
1316         switch (format) {
1317             case HAL_PIXEL_FORMAT_BLOB:
1318                 return ImageFormat.DEPTH_POINT_CLOUD;
1319             case HAL_PIXEL_FORMAT_Y16:
1320                 return ImageFormat.DEPTH16;
1321             case HAL_PIXEL_FORMAT_RAW16:
1322                 return ImageFormat.RAW_DEPTH;
1323             case HAL_PIXEL_FORMAT_RAW10:
1324                 return ImageFormat.RAW_DEPTH10;
1325             case ImageFormat.JPEG:
1326                 throw new IllegalArgumentException(
1327                         "ImageFormat.JPEG is an unknown internal format");
1328             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1329                 throw new IllegalArgumentException(
1330                         "IMPLEMENTATION_DEFINED must not leak to public API");
1331             default:
1332                 throw new IllegalArgumentException(
1333                         "Unknown DATASPACE_DEPTH format " + format);
1334         }
1335     }
1336 
1337     /**
1338      * Convert image formats from internal to public formats (in-place).
1339      *
1340      * @param formats an array of image formats
1341      * @return {@code formats}
1342      *
1343      * @see #imageFormatToPublic
1344      */
imageFormatToPublic(int[] formats)1345     static int[] imageFormatToPublic(int[] formats) {
1346         if (formats == null) {
1347             return null;
1348         }
1349 
1350         for (int i = 0; i < formats.length; ++i) {
1351             formats[i] = imageFormatToPublic(formats[i]);
1352         }
1353 
1354         return formats;
1355     }
1356 
1357     /**
1358      * Convert a public format compatible with {@code ImageFormat} to an internal format
1359      * from {@code graphics.h}.
1360      *
1361      * <p>In particular these formats are converted:
1362      * <ul>
1363      * <li>ImageFormat.JPEG => HAL_PIXEL_FORMAT_BLOB
1364      * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_PIXEL_FORMAT_BLOB
1365      * <li>ImageFormat.DEPTH_JPEG => HAL_PIXEL_FORMAT_BLOB
1366      * <li>ImageFormat.HEIC => HAL_PIXEL_FORMAT_BLOB
1367      * <li>ImageFormat.DEPTH16 => HAL_PIXEL_FORMAT_Y16
1368      * </ul>
1369      * </p>
1370      *
1371      * <p>Passing in an internal format which has a different public format equivalent will fail.
1372      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1373      *
1374      * <p>All other formats are returned as-is, no invalid check is performed.</p>
1375      *
1376      * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
1377      *
1378      * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
1379      * @return the converted image formats
1380      *
1381      * @see ImageFormat
1382      * @see PixelFormat
1383      *
1384      * @throws IllegalArgumentException
1385      *              if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
1386      */
imageFormatToInternal(int format)1387     static int imageFormatToInternal(int format) {
1388         switch (format) {
1389             case ImageFormat.JPEG:
1390             case ImageFormat.DEPTH_POINT_CLOUD:
1391             case ImageFormat.DEPTH_JPEG:
1392             case ImageFormat.HEIC:
1393                 return HAL_PIXEL_FORMAT_BLOB;
1394             case ImageFormat.DEPTH16:
1395                 return HAL_PIXEL_FORMAT_Y16;
1396             case ImageFormat.RAW_DEPTH:
1397                 return HAL_PIXEL_FORMAT_RAW16;
1398             case ImageFormat.RAW_DEPTH10:
1399                 return HAL_PIXEL_FORMAT_RAW10;
1400             default:
1401                 return format;
1402         }
1403     }
1404 
1405     /**
1406      * Convert a public format compatible with {@code ImageFormat} to an internal dataspace
1407      * from {@code graphics.h}.
1408      *
1409      * <p>In particular these formats are converted:
1410      * <ul>
1411      * <li>ImageFormat.JPEG => HAL_DATASPACE_V0_JFIF
1412      * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_DATASPACE_DEPTH
1413      * <li>ImageFormat.DEPTH16 => HAL_DATASPACE_DEPTH
1414      * <li>ImageFormat.DEPTH_JPEG => HAL_DATASPACE_DYNAMIC_DEPTH
1415      * <li>ImageFormat.HEIC => HAL_DATASPACE_HEIF
1416      * <li>others => HAL_DATASPACE_UNKNOWN
1417      * </ul>
1418      * </p>
1419      *
1420      * <p>Passing in an implementation-defined format here will fail (it's not a public format);
1421      * as will passing in an internal format which has a different public format equivalent.
1422      * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1423      *
1424      * <p>All other formats are returned as-is, no invalid check is performed.</p>
1425      *
1426      * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
1427      *
1428      * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
1429      * @return the converted image formats
1430      *
1431      * @see ImageFormat
1432      * @see PixelFormat
1433      *
1434      * @throws IllegalArgumentException
1435      *              if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
1436      */
imageFormatToDataspace(int format)1437     static int imageFormatToDataspace(int format) {
1438         switch (format) {
1439             case ImageFormat.JPEG:
1440                 return HAL_DATASPACE_V0_JFIF;
1441             case ImageFormat.DEPTH_POINT_CLOUD:
1442             case ImageFormat.DEPTH16:
1443             case ImageFormat.RAW_DEPTH:
1444             case ImageFormat.RAW_DEPTH10:
1445                 return HAL_DATASPACE_DEPTH;
1446             case ImageFormat.DEPTH_JPEG:
1447                 return HAL_DATASPACE_DYNAMIC_DEPTH;
1448             case ImageFormat.HEIC:
1449                 return HAL_DATASPACE_HEIF;
1450             default:
1451                 return HAL_DATASPACE_UNKNOWN;
1452         }
1453     }
1454 
1455     /**
1456      * Convert image formats from public to internal formats (in-place).
1457      *
1458      * @param formats an array of image formats
1459      * @return {@code formats}
1460      *
1461      * @see #imageFormatToInternal
1462      *
1463      * @hide
1464      */
imageFormatToInternal(int[] formats)1465     public static int[] imageFormatToInternal(int[] formats) {
1466         if (formats == null) {
1467             return null;
1468         }
1469 
1470         for (int i = 0; i < formats.length; ++i) {
1471             formats[i] = imageFormatToInternal(formats[i]);
1472         }
1473 
1474         return formats;
1475     }
1476 
getPublicFormatSizes(int format, boolean output, boolean highRes)1477     private Size[] getPublicFormatSizes(int format, boolean output, boolean highRes) {
1478         try {
1479             checkArgumentFormatSupported(format, output);
1480         } catch (IllegalArgumentException e) {
1481             return null;
1482         }
1483 
1484         int internalFormat = imageFormatToInternal(format);
1485         int dataspace = imageFormatToDataspace(format);
1486 
1487         return getInternalFormatSizes(internalFormat, dataspace, output, highRes);
1488     }
1489 
getInternalFormatSizes(int format, int dataspace, boolean output, boolean highRes)1490     private Size[] getInternalFormatSizes(int format, int dataspace,
1491             boolean output, boolean highRes) {
1492         // All depth formats are non-high-res.
1493         if (dataspace == HAL_DATASPACE_DEPTH && highRes) {
1494             return new Size[0];
1495         }
1496 
1497         SparseIntArray formatsMap =
1498                 !output ? mInputFormats :
1499                 dataspace == HAL_DATASPACE_DEPTH ? mDepthOutputFormats :
1500                 dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ? mDynamicDepthOutputFormats :
1501                 dataspace == HAL_DATASPACE_HEIF ? mHeicOutputFormats :
1502                 highRes ? mHighResOutputFormats :
1503                 mOutputFormats;
1504 
1505         int sizesCount = formatsMap.get(format);
1506         if ( ((!output || (dataspace == HAL_DATASPACE_DEPTH ||
1507                             dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ||
1508                             dataspace == HAL_DATASPACE_HEIF)) && sizesCount == 0) ||
1509                 (output && (dataspace != HAL_DATASPACE_DEPTH &&
1510                             dataspace != HAL_DATASPACE_DYNAMIC_DEPTH &&
1511                             dataspace != HAL_DATASPACE_HEIF) &&
1512                  mAllOutputFormats.get(format) == 0)) {
1513             return null;
1514         }
1515 
1516         Size[] sizes = new Size[sizesCount];
1517         int sizeIndex = 0;
1518 
1519         StreamConfiguration[] configurations =
1520                 (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations :
1521                 (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthConfigurations :
1522                 (dataspace == HAL_DATASPACE_HEIF) ? mHeicConfigurations :
1523                 mConfigurations;
1524         StreamConfigurationDuration[] minFrameDurations =
1525                 (dataspace == HAL_DATASPACE_DEPTH) ? mDepthMinFrameDurations :
1526                 (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthMinFrameDurations :
1527                 (dataspace == HAL_DATASPACE_HEIF) ? mHeicMinFrameDurations :
1528                 mMinFrameDurations;
1529 
1530         for (StreamConfiguration config : configurations) {
1531             int fmt = config.getFormat();
1532             if (fmt == format && config.isOutput() == output) {
1533                 if (output && mListHighResolution) {
1534                     // Filter slow high-res output formats; include for
1535                     // highRes, remove for !highRes
1536                     long duration = 0;
1537                     for (int i = 0; i < minFrameDurations.length; i++) {
1538                         StreamConfigurationDuration d = minFrameDurations[i];
1539                         if (d.getFormat() == fmt &&
1540                                 d.getWidth() == config.getSize().getWidth() &&
1541                                 d.getHeight() == config.getSize().getHeight()) {
1542                             duration = d.getDuration();
1543                             break;
1544                         }
1545                     }
1546                     if (dataspace != HAL_DATASPACE_DEPTH &&
1547                             highRes != (duration > DURATION_20FPS_NS)) {
1548                         continue;
1549                     }
1550                 }
1551                 sizes[sizeIndex++] = config.getSize();
1552             }
1553         }
1554 
1555         // Dynamic depth streams can have both fast and also high res modes.
1556         if ((sizeIndex != sizesCount) && (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH ||
1557                 dataspace == HAL_DATASPACE_HEIF)) {
1558 
1559             if (sizeIndex > sizesCount) {
1560                 throw new AssertionError(
1561                         "Too many dynamic depth sizes (expected " + sizesCount + ", actual " +
1562                         sizeIndex + ")");
1563             }
1564 
1565             if (sizeIndex <= 0) {
1566                 sizes = new Size[0];
1567             } else {
1568                 sizes = Arrays.copyOf(sizes, sizeIndex);
1569             }
1570         } else if (sizeIndex != sizesCount) {
1571             throw new AssertionError(
1572                     "Too few sizes (expected " + sizesCount + ", actual " + sizeIndex + ")");
1573         }
1574 
1575         return sizes;
1576     }
1577 
1578     /** Get the list of publicly visible output formats */
getPublicFormats(boolean output)1579     private int[] getPublicFormats(boolean output) {
1580         int[] formats = new int[getPublicFormatCount(output)];
1581 
1582         int i = 0;
1583 
1584         SparseIntArray map = getFormatsMap(output);
1585         for (int j = 0; j < map.size(); j++) {
1586             int format = map.keyAt(j);
1587             formats[i++] = imageFormatToPublic(format);
1588         }
1589         if (output) {
1590             for (int j = 0; j < mDepthOutputFormats.size(); j++) {
1591                 formats[i++] = depthFormatToPublic(mDepthOutputFormats.keyAt(j));
1592             }
1593             if (mDynamicDepthOutputFormats.size() > 0) {
1594                 // Only one publicly dynamic depth format is available.
1595                 formats[i++] = ImageFormat.DEPTH_JPEG;
1596             }
1597             if (mHeicOutputFormats.size() > 0) {
1598                 formats[i++] = ImageFormat.HEIC;
1599             }
1600         }
1601         if (formats.length != i) {
1602             throw new AssertionError("Too few formats " + i + ", expected " + formats.length);
1603         }
1604 
1605         return formats;
1606     }
1607 
1608     /** Get the format -> size count map for either output or input formats */
getFormatsMap(boolean output)1609     private SparseIntArray getFormatsMap(boolean output) {
1610         return output ? mAllOutputFormats : mInputFormats;
1611     }
1612 
getInternalFormatDuration(int format, int dataspace, Size size, int duration)1613     private long getInternalFormatDuration(int format, int dataspace, Size size, int duration) {
1614         // assume format is already checked, since its internal
1615 
1616         if (!isSupportedInternalConfiguration(format, dataspace, size)) {
1617             throw new IllegalArgumentException("size was not supported");
1618         }
1619 
1620         StreamConfigurationDuration[] durations = getDurations(duration, dataspace);
1621 
1622         for (StreamConfigurationDuration configurationDuration : durations) {
1623             if (configurationDuration.getFormat() == format &&
1624                     configurationDuration.getWidth() == size.getWidth() &&
1625                     configurationDuration.getHeight() == size.getHeight()) {
1626                 return configurationDuration.getDuration();
1627             }
1628         }
1629         // Default duration is '0' (unsupported/no extra stall)
1630         return 0;
1631     }
1632 
1633     /**
1634      * Get the durations array for the kind of duration
1635      *
1636      * @see #DURATION_MIN_FRAME
1637      * @see #DURATION_STALL
1638      * */
getDurations(int duration, int dataspace)1639     private StreamConfigurationDuration[] getDurations(int duration, int dataspace) {
1640         switch (duration) {
1641             case DURATION_MIN_FRAME:
1642                 return (dataspace == HAL_DATASPACE_DEPTH) ? mDepthMinFrameDurations :
1643                         (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ?
1644                         mDynamicDepthMinFrameDurations :
1645                         (dataspace == HAL_DATASPACE_HEIF) ? mHeicMinFrameDurations :
1646                         mMinFrameDurations;
1647 
1648             case DURATION_STALL:
1649                 return (dataspace == HAL_DATASPACE_DEPTH) ? mDepthStallDurations :
1650                         (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthStallDurations :
1651                         (dataspace == HAL_DATASPACE_HEIF) ? mHeicStallDurations :
1652                         mStallDurations;
1653             default:
1654                 throw new IllegalArgumentException("duration was invalid");
1655         }
1656     }
1657 
1658     /** Count the number of publicly-visible output formats */
getPublicFormatCount(boolean output)1659     private int getPublicFormatCount(boolean output) {
1660         SparseIntArray formatsMap = getFormatsMap(output);
1661         int size = formatsMap.size();
1662         if (output) {
1663             size += mDepthOutputFormats.size();
1664             size += mDynamicDepthOutputFormats.size();
1665             size += mHeicOutputFormats.size();
1666         }
1667 
1668         return size;
1669     }
1670 
arrayContains(T[] array, T element)1671     private static <T> boolean arrayContains(T[] array, T element) {
1672         if (array == null) {
1673             return false;
1674         }
1675 
1676         for (T el : array) {
1677             if (Objects.equals(el, element)) {
1678                 return true;
1679             }
1680         }
1681 
1682         return false;
1683     }
1684 
isSupportedInternalConfiguration(int format, int dataspace, Size size)1685     private boolean isSupportedInternalConfiguration(int format, int dataspace, Size size) {
1686         StreamConfiguration[] configurations =
1687                 (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations :
1688                 (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH) ? mDynamicDepthConfigurations :
1689                 (dataspace == HAL_DATASPACE_HEIF) ? mHeicConfigurations :
1690                 mConfigurations;
1691 
1692         for (int i = 0; i < configurations.length; i++) {
1693             if (configurations[i].getFormat() == format &&
1694                     configurations[i].getSize().equals(size)) {
1695                 return true;
1696             }
1697         }
1698 
1699         return false;
1700     }
1701 
1702     /**
1703      * Return this {@link StreamConfigurationMap} as a string representation.
1704      *
1705      * <p>{@code "StreamConfigurationMap(Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d,
1706      * stall:%d], ... [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]), Inputs([w:%d, h:%d,
1707      * format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)]), ValidOutputFormatsForInput(
1708      * [in:%d, out:%d, ... %d], ... [in:%d, out:%d, ... %d]), HighSpeedVideoConfigurations(
1709      * [w:%d, h:%d, min_fps:%d, max_fps:%d], ... [w:%d, h:%d, min_fps:%d, max_fps:%d]))"}.</p>
1710      *
1711      * <p>{@code Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d], ...
1712      * [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d])}, where
1713      * {@code [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]} represents an output
1714      * configuration's width, height, format, minimal frame duration in nanoseconds, and stall
1715      * duration in nanoseconds.</p>
1716      *
1717      * <p>{@code Inputs([w:%d, h:%d, format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)])}, where
1718      * {@code [w:%d, h:%d, format:%s(%d)]} represents an input configuration's width, height, and
1719      * format.</p>
1720      *
1721      * <p>{@code ValidOutputFormatsForInput([in:%s(%d), out:%s(%d), ... %s(%d)],
1722      * ... [in:%s(%d), out:%s(%d), ... %s(%d)])}, where {@code [in:%s(%d), out:%s(%d), ... %s(%d)]}
1723      * represents an input fomat and its valid output formats.</p>
1724      *
1725      * <p>{@code HighSpeedVideoConfigurations([w:%d, h:%d, min_fps:%d, max_fps:%d],
1726      * ... [w:%d, h:%d, min_fps:%d, max_fps:%d])}, where
1727      * {@code [w:%d, h:%d, min_fps:%d, max_fps:%d]} represents a high speed video output
1728      * configuration's width, height, minimal frame rate, and maximal frame rate.</p>
1729      *
1730      * @return string representation of {@link StreamConfigurationMap}
1731      */
1732     @Override
toString()1733     public String toString() {
1734         StringBuilder sb = new StringBuilder("StreamConfiguration(");
1735         appendOutputsString(sb);
1736         sb.append(", ");
1737         appendHighResOutputsString(sb);
1738         sb.append(", ");
1739         appendInputsString(sb);
1740         sb.append(", ");
1741         appendValidOutputFormatsForInputString(sb);
1742         sb.append(", ");
1743         appendHighSpeedVideoConfigurationsString(sb);
1744         sb.append(")");
1745 
1746         return sb.toString();
1747     }
1748 
1749     /**
1750      * Size comparison method used by size comparators.
1751      *
1752      * @hide
1753      */
compareSizes(int widthA, int heightA, int widthB, int heightB)1754     public static int compareSizes(int widthA, int heightA, int widthB, int heightB) {
1755         long left = widthA * (long) heightA;
1756         long right = widthB * (long) heightB;
1757         if (left == right) {
1758             left = widthA;
1759             right = widthB;
1760         }
1761         return (left < right) ? -1 : (left > right ? 1 : 0);
1762     }
1763 
appendOutputsString(StringBuilder sb)1764     private void appendOutputsString(StringBuilder sb) {
1765         sb.append("Outputs(");
1766         int[] formats = getOutputFormats();
1767         for (int format : formats) {
1768             Size[] sizes = getOutputSizes(format);
1769             for (Size size : sizes) {
1770                 long minFrameDuration = getOutputMinFrameDuration(format, size);
1771                 long stallDuration = getOutputStallDuration(format, size);
1772                 sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
1773                         "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
1774                         format, minFrameDuration, stallDuration));
1775             }
1776         }
1777         // Remove the pending ", "
1778         if (sb.charAt(sb.length() - 1) == ' ') {
1779             sb.delete(sb.length() - 2, sb.length());
1780         }
1781         sb.append(")");
1782     }
1783 
appendHighResOutputsString(StringBuilder sb)1784     private void appendHighResOutputsString(StringBuilder sb) {
1785         sb.append("HighResolutionOutputs(");
1786         int[] formats = getOutputFormats();
1787         for (int format : formats) {
1788             Size[] sizes = getHighResolutionOutputSizes(format);
1789             if (sizes == null) continue;
1790             for (Size size : sizes) {
1791                 long minFrameDuration = getOutputMinFrameDuration(format, size);
1792                 long stallDuration = getOutputStallDuration(format, size);
1793                 sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
1794                         "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
1795                         format, minFrameDuration, stallDuration));
1796             }
1797         }
1798         // Remove the pending ", "
1799         if (sb.charAt(sb.length() - 1) == ' ') {
1800             sb.delete(sb.length() - 2, sb.length());
1801         }
1802         sb.append(")");
1803     }
1804 
appendInputsString(StringBuilder sb)1805     private void appendInputsString(StringBuilder sb) {
1806         sb.append("Inputs(");
1807         int[] formats = getInputFormats();
1808         for (int format : formats) {
1809             Size[] sizes = getInputSizes(format);
1810             for (Size size : sizes) {
1811                 sb.append(String.format("[w:%d, h:%d, format:%s(%d)], ", size.getWidth(),
1812                         size.getHeight(), formatToString(format), format));
1813             }
1814         }
1815         // Remove the pending ", "
1816         if (sb.charAt(sb.length() - 1) == ' ') {
1817             sb.delete(sb.length() - 2, sb.length());
1818         }
1819         sb.append(")");
1820     }
1821 
appendValidOutputFormatsForInputString(StringBuilder sb)1822     private void appendValidOutputFormatsForInputString(StringBuilder sb) {
1823         sb.append("ValidOutputFormatsForInput(");
1824         int[] inputFormats = getInputFormats();
1825         for (int inputFormat : inputFormats) {
1826             sb.append(String.format("[in:%s(%d), out:", formatToString(inputFormat), inputFormat));
1827             int[] outputFormats = getValidOutputFormatsForInput(inputFormat);
1828             for (int i = 0; i < outputFormats.length; i++) {
1829                 sb.append(String.format("%s(%d)", formatToString(outputFormats[i]),
1830                         outputFormats[i]));
1831                 if (i < outputFormats.length - 1) {
1832                     sb.append(", ");
1833                 }
1834             }
1835             sb.append("], ");
1836         }
1837         // Remove the pending ", "
1838         if (sb.charAt(sb.length() - 1) == ' ') {
1839             sb.delete(sb.length() - 2, sb.length());
1840         }
1841         sb.append(")");
1842     }
1843 
appendHighSpeedVideoConfigurationsString(StringBuilder sb)1844     private void appendHighSpeedVideoConfigurationsString(StringBuilder sb) {
1845         sb.append("HighSpeedVideoConfigurations(");
1846         Size[] sizes = getHighSpeedVideoSizes();
1847         for (Size size : sizes) {
1848             Range<Integer>[] ranges = getHighSpeedVideoFpsRangesFor(size);
1849             for (Range<Integer> range : ranges) {
1850                 sb.append(String.format("[w:%d, h:%d, min_fps:%d, max_fps:%d], ", size.getWidth(),
1851                         size.getHeight(), range.getLower(), range.getUpper()));
1852             }
1853         }
1854         // Remove the pending ", "
1855         if (sb.charAt(sb.length() - 1) == ' ') {
1856             sb.delete(sb.length() - 2, sb.length());
1857         }
1858         sb.append(")");
1859     }
1860 
1861     /**
1862      * @hide
1863      */
formatToString(int format)1864     public static String formatToString(int format) {
1865         switch (format) {
1866             case ImageFormat.YV12:
1867                 return "YV12";
1868             case ImageFormat.YUV_420_888:
1869                 return "YUV_420_888";
1870             case ImageFormat.NV21:
1871                 return "NV21";
1872             case ImageFormat.NV16:
1873                 return "NV16";
1874             case PixelFormat.RGB_565:
1875                 return "RGB_565";
1876             case PixelFormat.RGBA_8888:
1877                 return "RGBA_8888";
1878             case PixelFormat.RGBX_8888:
1879                 return "RGBX_8888";
1880             case PixelFormat.RGB_888:
1881                 return "RGB_888";
1882             case ImageFormat.JPEG:
1883                 return "JPEG";
1884             case ImageFormat.YUY2:
1885                 return "YUY2";
1886             case ImageFormat.Y8:
1887                 return "Y8";
1888             case ImageFormat.Y16:
1889                 return "Y16";
1890             case ImageFormat.RAW_SENSOR:
1891                 return "RAW_SENSOR";
1892             case ImageFormat.RAW_PRIVATE:
1893                 return "RAW_PRIVATE";
1894             case ImageFormat.RAW10:
1895                 return "RAW10";
1896             case ImageFormat.DEPTH16:
1897                 return "DEPTH16";
1898             case ImageFormat.DEPTH_POINT_CLOUD:
1899                 return "DEPTH_POINT_CLOUD";
1900             case ImageFormat.DEPTH_JPEG:
1901                 return "DEPTH_JPEG";
1902             case ImageFormat.RAW_DEPTH:
1903                 return "RAW_DEPTH";
1904             case ImageFormat.RAW_DEPTH10:
1905                 return "RAW_DEPTH10";
1906             case ImageFormat.PRIVATE:
1907                 return "PRIVATE";
1908             case ImageFormat.HEIC:
1909                 return "HEIC";
1910             default:
1911                 return "UNKNOWN";
1912         }
1913     }
1914 
    // Local copies of the HAL pixel-format constants,
    // from system/core/include/system/graphics.h
    private static final int HAL_PIXEL_FORMAT_RAW16 = 0x20;
    /** @hide */
    public static final int HAL_PIXEL_FORMAT_BLOB = 0x21;
    private static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;
    private static final int HAL_PIXEL_FORMAT_YCbCr_420_888 = 0x23;
    private static final int HAL_PIXEL_FORMAT_RAW_OPAQUE = 0x24;
    private static final int HAL_PIXEL_FORMAT_RAW10 = 0x25;
    private static final int HAL_PIXEL_FORMAT_RAW12 = 0x26;
    private static final int HAL_PIXEL_FORMAT_Y16 = 0x20363159;


    // Bit-field shifts for the sub-fields packed into a HAL dataspace value
    // (standard / transfer / range).
    private static final int HAL_DATASPACE_STANDARD_SHIFT = 16;
    private static final int HAL_DATASPACE_TRANSFER_SHIFT = 22;
    private static final int HAL_DATASPACE_RANGE_SHIFT = 27;

    private static final int HAL_DATASPACE_UNKNOWN = 0x0;
    /**
     * JFIF (JPEG) dataspace: standard=2, transfer=3, range=1 packed into the
     * dataspace bit-fields above.
     * NOTE(review): presumably BT.601 standard / SMPTE 170M transfer / full range per the
     * HAL dataspace definitions — confirm against system/core/include/system/graphics.h.
     * @hide
     */
    public static final int HAL_DATASPACE_V0_JFIF =
            (2 << HAL_DATASPACE_STANDARD_SHIFT) |
            (3 << HAL_DATASPACE_TRANSFER_SHIFT) |
            (1 << HAL_DATASPACE_RANGE_SHIFT);

    /**
     * Dataspace used to select the depth stream configurations (see mDepthConfigurations).
     * @hide
     */
    public static final int HAL_DATASPACE_DEPTH = 0x1000;
    /**
     * Dataspace used to select the dynamic-depth stream configurations
     * (see mDynamicDepthConfigurations).
     * @hide
     */
    public static final int HAL_DATASPACE_DYNAMIC_DEPTH = 0x1002;
    /**
     * Dataspace used to select the HEIC stream configurations (see mHeicConfigurations).
     * @hide
     */
    public static final int HAL_DATASPACE_HEIF = 0x1003;
    // 50 ms frame duration == 20 fps.
    private static final long DURATION_20FPS_NS = 50000000L;
    /**
     * Selectors for the kind of duration being queried; see {@code getDurations(int, int)}.
     * @see #getDurations(int, int)
     */
    private static final int DURATION_MIN_FRAME = 0;
    private static final int DURATION_STALL = 1;

    // Raw stream configurations / durations for the default (non-depth) dataspaces,
    // as reported by the camera HAL.
    private final StreamConfiguration[] mConfigurations;
    private final StreamConfigurationDuration[] mMinFrameDurations;
    private final StreamConfigurationDuration[] mStallDurations;

    // Raw stream configurations / durations for HAL_DATASPACE_DEPTH.
    private final StreamConfiguration[] mDepthConfigurations;
    private final StreamConfigurationDuration[] mDepthMinFrameDurations;
    private final StreamConfigurationDuration[] mDepthStallDurations;

    // Raw stream configurations / durations for HAL_DATASPACE_DYNAMIC_DEPTH.
    private final StreamConfiguration[] mDynamicDepthConfigurations;
    private final StreamConfigurationDuration[] mDynamicDepthMinFrameDurations;
    private final StreamConfigurationDuration[] mDynamicDepthStallDurations;

    // Raw stream configurations / durations for HAL_DATASPACE_HEIF.
    private final StreamConfiguration[] mHeicConfigurations;
    private final StreamConfigurationDuration[] mHeicMinFrameDurations;
    private final StreamConfigurationDuration[] mHeicStallDurations;

    private final HighSpeedVideoConfiguration[] mHighSpeedVideoConfigurations;
    // Reprocessing input format -> valid output formats map; may be null if the
    // device does not support reprocessing (NOTE(review): nullability inferred from
    // it being a constructor-supplied array-like field — confirm against the constructor).
    private final ReprocessFormatsMap mInputOutputFormatsMap;

    // Whether slow high-resolution output sizes are included in the listings.
    private final boolean mListHighResolution;

    /** internal format -> num output sizes mapping, not including slow high-res sizes, for
     * non-depth dataspaces */
    private final SparseIntArray mOutputFormats = new SparseIntArray();
    /** internal format -> num output sizes mapping for slow high-res sizes, for non-depth
     * dataspaces */
    private final SparseIntArray mHighResOutputFormats = new SparseIntArray();
    /** internal format -> num output sizes mapping for all non-depth dataspaces */
    private final SparseIntArray mAllOutputFormats = new SparseIntArray();
    /** internal format -> num input sizes mapping, for input reprocessing formats */
    private final SparseIntArray mInputFormats = new SparseIntArray();
    /** internal format -> num depth output sizes mapping, for HAL_DATASPACE_DEPTH */
    private final SparseIntArray mDepthOutputFormats = new SparseIntArray();
    /** internal format -> num dynamic depth output sizes mapping, for HAL_DATASPACE_DYNAMIC_DEPTH */
    private final SparseIntArray mDynamicDepthOutputFormats = new SparseIntArray();
    /** internal format -> num heic output sizes mapping, for HAL_DATASPACE_HEIF */
    private final SparseIntArray mHeicOutputFormats = new SparseIntArray();

    /** High speed video Size -> FPS range count mapping*/
    private final HashMap</*HighSpeedVideoSize*/Size, /*Count*/Integer> mHighSpeedVideoSizeMap =
            new HashMap<Size, Integer>();
    /** High speed video FPS range -> Size count mapping*/
    private final HashMap</*HighSpeedVideoFpsRange*/Range<Integer>, /*Count*/Integer>
            mHighSpeedVideoFpsRangeMap = new HashMap<Range<Integer>, Integer>();

2002 }
2003