1 /*
2  * Copyright (C) 2015 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 
18 package android.hardware.camera2.params;
19 
20 import static com.android.internal.util.Preconditions.*;
21 
22 import android.annotation.FlaggedApi;
23 import android.annotation.IntDef;
24 import android.annotation.NonNull;
25 import android.annotation.Nullable;
26 import android.annotation.SuppressLint;
27 import android.annotation.SystemApi;
28 import android.annotation.TestApi;
29 import android.graphics.ColorSpace;
30 import android.graphics.ImageFormat;
31 import android.graphics.ImageFormat.Format;
32 import android.hardware.DataSpace.NamedDataSpace;
33 import android.hardware.HardwareBuffer;
34 import android.hardware.HardwareBuffer.Usage;
35 import android.hardware.camera2.CameraCaptureSession;
36 import android.hardware.camera2.CameraCharacteristics;
37 import android.hardware.camera2.CameraDevice;
38 import android.hardware.camera2.CameraMetadata;
39 import android.hardware.camera2.MultiResolutionImageReader;
40 import android.hardware.camera2.params.DynamicRangeProfiles;
41 import android.hardware.camera2.params.MultiResolutionStreamInfo;
42 import android.hardware.camera2.utils.HashCodeHelpers;
43 import android.hardware.camera2.utils.SurfaceUtils;
44 import android.media.ImageReader;
45 import android.os.Parcel;
46 import android.os.Parcelable;
47 import android.util.IntArray;
48 import android.util.Log;
49 import android.util.Size;
50 import android.view.Surface;
51 
52 import com.android.internal.camera.flags.Flags;
53 
54 import java.lang.annotation.Retention;
55 import java.lang.annotation.RetentionPolicy;
56 import java.util.ArrayList;
57 import java.util.Collection;
58 import java.util.Collections;
59 import java.util.List;
60 import java.util.Objects;
61 import java.util.concurrent.atomic.AtomicInteger;
62 
63 /**
64  * A class for describing camera output, which contains a {@link Surface} and its specific
65  * configuration for creating a capture session.
66  *
67  * <p>There are several ways to instantiate, modify and use OutputConfigurations. The most common
68  * and recommended usage patterns are summarized in the following list:</p>
69  *<ul>
70  * <li>Passing a {@link Surface} to the constructor and using the OutputConfiguration instance as
71  * argument to {@link CameraDevice#createCaptureSessionByOutputConfigurations}. This is the most
72  * frequent usage and clients should consider it first before other more complicated alternatives.
73  * </li>
74  *
75  * <li>Passing only a surface source class as an argument to the constructor. This is usually
76  * followed by a call to create a capture session
77  * (see {@link CameraDevice#createCaptureSessionByOutputConfigurations}) and a subsequent
78  * {@link #addSurface} call with a valid {@link Surface}. The sequence completes with
79  * {@link CameraCaptureSession#finalizeOutputConfigurations}. This is the deferred usage case which
80  * aims to enhance performance by allowing the resource-intensive capture session create call to
81  * execute in parallel with any {@link Surface} initialization, such as waiting for a
82  * {@link android.view.SurfaceView} to be ready as part of the UI initialization.</li>
83  *
84  * <li>The third and most complex usage pattern involves surface sharing. Once instantiated, an
85  * OutputConfiguration can be enabled for surface sharing via {@link #enableSurfaceSharing}. This
86  * must be done before creating a new capture session and enables calls to
87  * {@link CameraCaptureSession#updateOutputConfiguration}. An OutputConfiguration with enabled
88  * surface sharing can be modified via {@link #addSurface} or {@link #removeSurface}. The updates
89  * to this OutputConfiguration will only come into effect after
90  * {@link CameraCaptureSession#updateOutputConfiguration} returns without throwing exceptions.
91  * Such updates can be done as long as the session is active. Clients should always consider the
92  * additional requirements and limitations placed on the output surfaces (for more details see
93  * {@link #enableSurfaceSharing}, {@link #addSurface}, {@link #removeSurface},
94  * {@link CameraCaptureSession#updateOutputConfiguration}). A trade-off exists between additional
95  * complexity and flexibility. If exercised correctly surface sharing can switch between different
96  * output surfaces without interrupting any ongoing repeating capture requests. This saves time and
97  * can significantly improve the user experience.</li>
98  *
99  * <li>Surface sharing can be used in combination with deferred surfaces. The rules from both cases
100  * are combined and clients must call {@link #enableSurfaceSharing} before creating a capture
101  * session. Attach and/or remove output surfaces via  {@link #addSurface}/{@link #removeSurface} and
102  * finalize the configuration using {@link CameraCaptureSession#finalizeOutputConfigurations}.
103  * {@link CameraCaptureSession#updateOutputConfiguration} can be called after the configuration
104  * finalize method returns without exceptions.</li>
105  *
106  * <li>If the camera device supports multi-resolution output streams, {@link
107  * CameraCharacteristics#SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP} will contain the
108  * formats and their corresponding stream info. The application can use an OutputConfiguration
109  * created with the multi-resolution stream info queried from {@link
110  * MultiResolutionStreamConfigurationMap#getOutputInfo} and
111  * {@link android.hardware.camera2.MultiResolutionImageReader} to capture variable size images.</li>
112  *
113  * </ul>
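 *
 * <p>A minimal sketch of the first (and most common) pattern above, assuming
 * {@code cameraDevice}, {@code previewSurface}, {@code stateCallback}, and {@code handler} are
 * provided by the application:</p>
 * <pre>{@code
 * OutputConfiguration config = new OutputConfiguration(previewSurface);
 * cameraDevice.createCaptureSessionByOutputConfigurations(
 *         Arrays.asList(config), stateCallback, handler);
 * }</pre>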
114  *
115  * <p> As of {@link android.os.Build.VERSION_CODES#P Android P}, all formats except
116  * {@link ImageFormat#JPEG} and {@link ImageFormat#RAW_PRIVATE} can be used for sharing, subject to
117  * device support. On prior API levels, only {@link ImageFormat#PRIVATE} format may be used.</p>
118  *
119  * @see CameraDevice#createCaptureSessionByOutputConfigurations
120  * @see CameraCharacteristics#SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP
121  *
122  */
123 public final class OutputConfiguration implements Parcelable {
124 
125     /**
126      * Rotation constant: 0 degree rotation (no rotation)
127      *
128      * @hide
129      */
130     @SystemApi
131     public static final int ROTATION_0 = 0;
132 
133     /**
134      * Rotation constant: 90 degree counterclockwise rotation.
135      *
136      * @hide
137      */
138     @SystemApi
139     public static final int ROTATION_90 = 1;
140 
141     /**
142      * Rotation constant: 180 degree counterclockwise rotation.
143      *
144      * @hide
145      */
146     @SystemApi
147     public static final int ROTATION_180 = 2;
148 
149     /**
150      * Rotation constant: 270 degree counterclockwise rotation.
151      *
152      * @hide
153      */
154     @SystemApi
155     public static final int ROTATION_270 = 3;
156 
157     /**
158      * Invalid surface group ID.
159      *
160      * <p>An {@link OutputConfiguration} with this value indicates that the included surface
161      * doesn't belong to any surface group.</p>
162      */
163     public static final int SURFACE_GROUP_ID_NONE = -1;
164 
165     /**
166      * Default timestamp base.
167      *
168      * <p>The camera device decides the timestamp based on the properties of the
169      * output surface.</p>
170      * <ul>
171      * <li> For a SurfaceView output surface, the timestamp base is {@link
172      * #TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED}. The timestamp is overridden with choreographer
173      * pulses from the display subsystem for smoother display of camera frames when the camera
174      * device runs at a fixed frame rate. The timestamp is roughly in the same time base as
175      * {@link android.os.SystemClock#uptimeMillis}.</li>
176      * <li> For an output surface of MediaRecorder, MediaCodec, or ImageReader with {@link
177      * android.hardware.HardwareBuffer#USAGE_VIDEO_ENCODE} usage flag, the timestamp base is
178      * {@link #TIMESTAMP_BASE_MONOTONIC}, which is roughly the same time base as
179      * {@link android.os.SystemClock#uptimeMillis}.</li>
180      * <li> For all other cases, the timestamp base is {@link #TIMESTAMP_BASE_SENSOR}, the same
181      * as what's specified by {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE}.
182      * <ul><li> For a SurfaceTexture output surface, the camera system re-spaces the delivery
183      * of output frames based on image readout intervals, reducing viewfinder jitter. The timestamps
184      * of images remain in the {@link #TIMESTAMP_BASE_SENSOR} time base.</li></ul></li>
185      * </ul>
186      * <p>Note that the reduction of frame jitter for SurfaceView and SurfaceTexture comes with a
187      * slight increase in photon-to-photon latency, which is the time from when photons hit the
188      * scene to when the corresponding pixels show up on the screen. If the photon-to-photon latency
189      * is more important than the smoothness of the viewfinder, {@link #TIMESTAMP_BASE_SENSOR} should be
190      * used instead.</p>
191      *
192      * @see #TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED
193      * @see #TIMESTAMP_BASE_MONOTONIC
194      * @see #TIMESTAMP_BASE_SENSOR
195      */
196     public static final int TIMESTAMP_BASE_DEFAULT = 0;
197 
198     /**
199      * Timestamp base of {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE}.
200      *
201      * <p>The timestamps of the output images are in the time base as specified by {@link
202      * CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE}. The application can look up the
203      * corresponding result metadata by matching the timestamp with a {@link
204      * CameraCaptureSession.CaptureCallback#onCaptureStarted}, or with a {@link
205      * CameraCaptureSession.CaptureCallback#onReadoutStarted} if readout timestamp is used.</p>
206      */
207     public static final int TIMESTAMP_BASE_SENSOR = 1;
208 
209     /**
210      * Timestamp base roughly the same as {@link android.os.SystemClock#uptimeMillis}.
211      *
212      * <p>The timestamps of the output images are monotonically increasing, and are roughly in the
213      * same time base as {@link android.os.SystemClock#uptimeMillis}. The timestamps with this
214      * time base can be directly used for audio-video sync in video recording.</p>
215      *
216      * <p>If the camera device's {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE} is
217      * REALTIME, timestamps with this time base cannot directly match the timestamps in
218      * {@link CameraCaptureSession.CaptureCallback#onCaptureStarted}, {@link
219      * CameraCaptureSession.CaptureCallback#onReadoutStarted}, or the sensor timestamps in
220      * {@link android.hardware.camera2.CaptureResult}.</p>
221      */
222     public static final int TIMESTAMP_BASE_MONOTONIC = 2;
223 
224     /**
225      * Timestamp base roughly the same as {@link android.os.SystemClock#elapsedRealtime}.
226      *
227      * <p>The timestamps of the output images are roughly in the
228      * same time base as {@link android.os.SystemClock#elapsedRealtime}. The timestamps with this
229      * time base cannot be directly used for audio-video sync in video recording.</p>
230      *
231      * <p>If the camera device's {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE} is
232      * UNKNOWN, timestamps with this time base cannot directly match the timestamps in
233      * {@link CameraCaptureSession.CaptureCallback#onCaptureStarted}, {@link
234      * CameraCaptureSession.CaptureCallback#onReadoutStarted}, or the sensor timestamps in
235      * {@link android.hardware.camera2.CaptureResult}.</p>
236      *
237      * <p>If using a REALTIME timestamp base on a device that supports only
238      * TIMESTAMP_SOURCE_UNKNOWN, the accuracy of timestamps is only what is guaranteed in the
239      * documentation for UNKNOWN. In particular, they have no guarantees about being accurate
240      * enough to use in fusing image data with the output of inertial sensors, for features such as
241      * image stabilization or augmented reality.</p>
242      */
243     public static final int TIMESTAMP_BASE_REALTIME = 3;
244 
245     /**
246      * Timestamp is synchronized to choreographer.
247      *
248      * <p>The timestamp of the output images are overridden with choreographer pulses from the
249      * display subsystem for smoother display of camera frames. An output target of SurfaceView
250      * uses this time base by default. Note that the timestamp override is done for fixed camera
251      * frame rate only.</p>
252      *
253      * <p>This timestamp base isn't applicable to SurfaceTexture targets. SurfaceTexture's
254      * {@link android.graphics.SurfaceTexture#updateTexImage updateTexImage} function always
255      * uses the latest image from the camera stream. In the case of a TextureView, the image is
256      * displayed right away.</p>
257      *
258      * <p>Timestamps with this time base cannot directly match the timestamps in
259      * {@link CameraCaptureSession.CaptureCallback#onCaptureStarted}, {@link
260      * CameraCaptureSession.CaptureCallback#onReadoutStarted}, or the sensor timestamps in
261      * {@link android.hardware.camera2.CaptureResult}. This timestamp base shouldn't be used if the
262      * timestamp needs to be used for audio-video synchronization.</p>
263      */
264     public static final int TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED = 4;
265 
266     /**
267      * Timestamp is the start of readout in the same time domain as TIMESTAMP_BASE_SENSOR.
268      *
269      * <p>NOTE: do not use! Use setReadoutTimestampEnabled instead.</p>
270      *
271      * @hide
272      */
273     public static final int TIMESTAMP_BASE_READOUT_SENSOR = 5;
274 
275     /** @hide */
276     @Retention(RetentionPolicy.SOURCE)
277     @IntDef(prefix = {"TIMESTAMP_BASE_"}, value =
278         {TIMESTAMP_BASE_DEFAULT,
279          TIMESTAMP_BASE_SENSOR,
280          TIMESTAMP_BASE_MONOTONIC,
281          TIMESTAMP_BASE_REALTIME,
282          TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED,
283          TIMESTAMP_BASE_READOUT_SENSOR})
284     public @interface TimestampBase {};
285 
286     /** @hide */
287     @Retention(RetentionPolicy.SOURCE)
288     @IntDef(prefix = {"SENSOR_PIXEL_MODE_"}, value =
289         {CameraMetadata.SENSOR_PIXEL_MODE_DEFAULT,
290          CameraMetadata.SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION})
291     public @interface SensorPixelMode {};
292 
293     /** @hide */
294     @Retention(RetentionPolicy.SOURCE)
295     @IntDef(prefix = {"STREAM_USE_CASE_"}, value =
296         {CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
297          CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
298          CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
299          CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
300          CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
301          CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL,
302          CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW})
303     public @interface StreamUseCase {};
304 
305     /**
306      * Automatic mirroring based on camera facing
307      *
308      * <p>This is the default mirroring mode for the camera device. With this mode,
309      * the camera output is mirrored horizontally for front-facing cameras. There is
310      * no mirroring for rear-facing and external cameras.</p>
311      */
312     public static final int MIRROR_MODE_AUTO = 0;
313 
314     /**
315      * No mirror transform is applied
316      *
317      * <p>No mirroring is applied to the camera output regardless of the camera facing.</p>
318      */
319     public static final int MIRROR_MODE_NONE = 1;
320 
321     /**
322      * Camera output is mirrored horizontally
323      *
324      * <p>The camera output is mirrored horizontally, the same behavior as in AUTO mode for a
325      * front-facing camera.</p>
326      */
327     public static final int MIRROR_MODE_H = 2;
328 
329     /**
330      * Camera output is mirrored vertically
331      */
332     public static final int MIRROR_MODE_V = 3;
333 
334     /** @hide */
335     @Retention(RetentionPolicy.SOURCE)
336     @IntDef(prefix = {"MIRROR_MODE_"}, value =
337         {MIRROR_MODE_AUTO,
338           MIRROR_MODE_NONE,
339           MIRROR_MODE_H,
340           MIRROR_MODE_V})
341     public @interface MirrorMode {};
342 
343     /**
344      * Create a new {@link OutputConfiguration} instance with a {@link Surface}.
345      *
346      * @param surface
347      *          A Surface for camera to output to.
348      *
349      * <p>This constructor creates a default configuration, with a surface group ID of
350      * {@value #SURFACE_GROUP_ID_NONE}.</p>
351      *
352      */
353     public OutputConfiguration(@NonNull Surface surface) {
354         this(SURFACE_GROUP_ID_NONE, surface, ROTATION_0);
355     }
356 
357     /**
358      * Unknown surface source type.
359      */
360     private final int SURFACE_TYPE_UNKNOWN = -1;
361 
362     /**
363      * The surface is obtained from {@link android.view.SurfaceView}.
364      */
365     private final int SURFACE_TYPE_SURFACE_VIEW = 0;
366 
367     /**
368      * The surface is obtained from {@link android.graphics.SurfaceTexture}.
369      */
370     private final int SURFACE_TYPE_SURFACE_TEXTURE = 1;
371 
372     /**
373      * The surface is obtained from {@link android.media.MediaRecorder}.
374      */
375     private static final int SURFACE_TYPE_MEDIA_RECORDER = 2;
376 
377     /**
378      * The surface is obtained from {@link android.media.MediaCodec}.
379      */
380     private static final int SURFACE_TYPE_MEDIA_CODEC = 3;
381 
382     /**
383      * The surface is obtained from {@link android.media.ImageReader}.
384      */
385     private static final int SURFACE_TYPE_IMAGE_READER = 4;
386 
387     /**
388      * Maximum number of surfaces supported by one {@link OutputConfiguration}.
389      *
390      * <p>The combined number of surfaces added by the constructor and
391      * {@link OutputConfiguration#addSurface} should not exceed this value.</p>
392      *
393      */
394     private static final int MAX_SURFACES_COUNT = 4;
395 
396     /**
397      * Create a new {@link OutputConfiguration} instance with a {@link Surface},
398      * with a surface group ID.
399      *
400      * <p>
401      * A surface group ID is used to identify which surface group this output surface belongs to. A
402      * surface group is a group of output surfaces that are not intended to receive camera output
403      * buffer streams simultaneously. The {@link CameraDevice} may be able to share the buffers used
404      * by all the surfaces from the same surface group, therefore may reduce the overall memory
405      * footprint. The application should only set the same group ID for the streams that are not
406      * simultaneously streaming. A negative ID indicates that this surface doesn't belong to any
407      * surface group. The default value is {@value #SURFACE_GROUP_ID_NONE}.</p>
408      *
409      * <p>For example, a video chat application that has an adaptive output resolution feature would
410      * need two (or more) output resolutions, to switch resolutions without any output glitches.
411      * However, at any given time, only one output is active to minimize outgoing network bandwidth
412      * and encoding overhead.  To save memory, the application should set the video outputs to have
413      * the same non-negative group ID, so that the camera device can share the same memory region
414      * for the alternating outputs.</p>
415      *
416      * <p>It is not an error to include output streams with the same group ID in the same capture
417      * request, but the resulting memory consumption may be higher than if the two streams were
418      * not in the same surface group to begin with, especially if the outputs have substantially
419      * different dimensions.</p>
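     *
     * <p>A minimal sketch of the adaptive-resolution case above, assuming {@code surface720p}
     * and {@code surface1080p} are the two alternating video output surfaces:</p>
     * <pre>{@code
     * int sharedGroupId = 1;
     * OutputConfiguration lowRes = new OutputConfiguration(sharedGroupId, surface720p);
     * OutputConfiguration highRes = new OutputConfiguration(sharedGroupId, surface1080p);
     * }</pre>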
420      *
421      * @param surfaceGroupId
422      *          A group ID for this output, used for sharing memory between multiple outputs.
423      * @param surface
424      *          A Surface for camera to output to.
425      *
426      */
427     public OutputConfiguration(int surfaceGroupId, @NonNull Surface surface) {
428         this(surfaceGroupId, surface, ROTATION_0);
429     }
430 
431     /**
432      * Set the multi-resolution output flag.
433      *
434      * <p>Specify that this OutputConfiguration is part of a multi-resolution output stream group
435      * used by {@link android.hardware.camera2.MultiResolutionImageReader}.</p>
436      *
437      * <p>This function must only be called for an OutputConfiguration with a non-negative
438      * group ID, and all OutputConfigurations of a MultiResolutionImageReader will have the same
439      * group ID and have this flag set.</p>
440      *
441      * @throws IllegalStateException If surface sharing is enabled via {@link #enableSurfaceSharing}
442      *         call, or no non-negative group ID has been set.
443      * @hide
444      */
445     public void setMultiResolutionOutput() {
446         if (mIsShared) {
447             throw new IllegalStateException("Multi-resolution output flag must not be set for " +
448                     "configuration with surface sharing");
449         }
450         if (mSurfaceGroupId == SURFACE_GROUP_ID_NONE) {
451             throw new IllegalStateException("Multi-resolution output flag should only be set for " +
452                     "surface with non-negative group ID");
453         }
454 
455         mIsMultiResolution = true;
456     }
457 
458     /**
459      * Set a specific device supported dynamic range profile.
460      *
461      * <p>Clients can choose from any profile advertised as supported in
462      * {@link CameraCharacteristics#REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES}
463      * queried using {@link DynamicRangeProfiles#getSupportedProfiles()}.
464      * If this is not explicitly set, then the default profile will be
465      * {@link DynamicRangeProfiles#STANDARD}.</p>
466      *
467      * <p>Do note that invalid combinations between the registered output
468      * surface pixel format and the configured dynamic range profile will
469      * cause capture session initialization failure. Invalid combinations
470      * include any 10-bit dynamic range profile advertised in
471      * {@link DynamicRangeProfiles#getSupportedProfiles()} combined with
472      * an output Surface pixel format different from {@link ImageFormat#PRIVATE}
473      * (the default for Surfaces initialized by {@link android.view.SurfaceView},
474      * {@link android.view.TextureView}, {@link android.media.MediaRecorder},
475      * {@link android.media.MediaCodec} etc.)
476      * or {@link ImageFormat#YCBCR_P010}.</p>
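     *
     * <p>A minimal sketch of selecting a supported 10-bit profile, assuming
     * {@code characteristics} belongs to the camera being configured and
     * {@code outputConfiguration} targets a compatible output:</p>
     * <pre>{@code
     * DynamicRangeProfiles profiles = characteristics.get(
     *         CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES);
     * if (profiles != null
     *         && profiles.getSupportedProfiles().contains(DynamicRangeProfiles.HLG10)) {
     *     outputConfiguration.setDynamicRangeProfile(DynamicRangeProfiles.HLG10);
     * }
     * }</pre>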
477      */
478     public void setDynamicRangeProfile(@DynamicRangeProfiles.Profile long profile) {
479         mDynamicRangeProfile = profile;
480     }
481 
482     /**
483      * Return current dynamic range profile.
484      *
485      * @return the currently set dynamic range profile
486      */
487     public @DynamicRangeProfiles.Profile long getDynamicRangeProfile() {
488         return mDynamicRangeProfile;
489     }
490 
491     /**
492      * Set a specific device-supported color space.
493      *
494      * <p>Clients can choose from any profile advertised as supported in
495      * {@link CameraCharacteristics#REQUEST_AVAILABLE_COLOR_SPACE_PROFILES}
496      * queried using {@link ColorSpaceProfiles#getSupportedColorSpaces}.
497      * When set, the colorSpace will override the default color spaces of the output targets,
498      * or the color space implied by the dataSpace passed into an {@link ImageReader}'s
499      * constructor.</p>
500      *
501      * @hide
502      */
503     @TestApi
504     public void setColorSpace(@NonNull ColorSpace.Named colorSpace) {
505         mColorSpace = colorSpace.ordinal();
506     }
507 
508     /**
509      * Clear the color space, such that the default color space will be used.
510      *
511      * @hide
512      */
513     @TestApi
514     public void clearColorSpace() {
515         mColorSpace = ColorSpaceProfiles.UNSPECIFIED;
516     }
517 
518     /**
519      * Return the current color space.
520      *
521      * @return the currently set color space
522      * @hide
523      */
524     @TestApi
525     @SuppressLint("MethodNameUnits")
526     public @Nullable ColorSpace getColorSpace() {
527         if (mColorSpace != ColorSpaceProfiles.UNSPECIFIED) {
528             return ColorSpace.get(ColorSpace.Named.values()[mColorSpace]);
529         } else {
530             return null;
531         }
532     }
533 
534     /**
535      * Create a new {@link OutputConfiguration} instance.
536      *
537      * <p>This constructor takes an argument for the desired camera rotation.</p>
538      *
539      * @param surface
540      *          A Surface for camera to output to.
541      * @param rotation
542      *          The desired rotation to be applied on camera output. Value must be one of
543      *          ROTATION_[0, 90, 180, 270]. Note that when the rotation is 90 or 270 degrees,
544      *          application should make sure corresponding surface size has width and height
545      *          transposed relative to the width and height without rotation. For example,
546      * if the application needs the camera to capture a 1280x720 picture and rotate it by 90 degrees,
547      *          application should set rotation to {@code ROTATION_90} and make sure the
548      *          corresponding Surface size is 720x1280. Note that {@link CameraDevice} might
549      *          throw {@code IllegalArgumentException} if device cannot perform such rotation.
550      * @hide
551      */
552     @SystemApi
553     public OutputConfiguration(@NonNull Surface surface, int rotation) {
554         this(SURFACE_GROUP_ID_NONE, surface, rotation);
555     }
556 
557     /**
558      * Create a new {@link OutputConfiguration} instance, with rotation and a group ID.
559      *
560      * <p>This constructor takes an argument for desired camera rotation and for the surface group
561      * ID.  See {@link #OutputConfiguration(int, Surface)} for details of the group ID.</p>
562      *
563      * @param surfaceGroupId
564      *          A group ID for this output, used for sharing memory between multiple outputs.
565      * @param surface
566      *          A Surface for camera to output to.
567      * @param rotation
568      *          The desired rotation to be applied on camera output. Value must be one of
569      *          ROTATION_[0, 90, 180, 270]. Note that when the rotation is 90 or 270 degrees,
570      *          application should make sure corresponding surface size has width and height
571      *          transposed relative to the width and height without rotation. For example,
572      * if the application needs the camera to capture a 1280x720 picture and rotate it by 90 degrees,
573      *          application should set rotation to {@code ROTATION_90} and make sure the
574      *          corresponding Surface size is 720x1280. Note that {@link CameraDevice} might
575      *          throw {@code IllegalArgumentException} if device cannot perform such rotation.
576      * @hide
577      */
578     @SystemApi
579     public OutputConfiguration(int surfaceGroupId, @NonNull Surface surface, int rotation) {
580         checkNotNull(surface, "Surface must not be null");
581         checkArgumentInRange(rotation, ROTATION_0, ROTATION_270, "Rotation constant");
582         mSurfaceGroupId = surfaceGroupId;
583         mSurfaceType = SURFACE_TYPE_UNKNOWN;
584         mSurfaces = new ArrayList<Surface>();
585         mSurfaces.add(surface);
586         mRotation = rotation;
587         mConfiguredSize = SurfaceUtils.getSurfaceSize(surface);
588         mConfiguredFormat = SurfaceUtils.getSurfaceFormat(surface);
589         mConfiguredDataspace = SurfaceUtils.getSurfaceDataspace(surface);
590         mConfiguredGenerationId = surface.getGenerationId();
591         mIsDeferredConfig = false;
592         mIsShared = false;
593         mPhysicalCameraId = null;
594         mIsMultiResolution = false;
595         mSensorPixelModesUsed = new ArrayList<Integer>();
596         mDynamicRangeProfile = DynamicRangeProfiles.STANDARD;
597         mColorSpace = ColorSpaceProfiles.UNSPECIFIED;
598         mStreamUseCase = CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
599         mTimestampBase = TIMESTAMP_BASE_DEFAULT;
600         mMirrorMode = MIRROR_MODE_AUTO;
601         mMirrorModeForSurfaces = new IntArray();
602         if (Flags.mirrorModeSharedSurfaces()) {
603             mMirrorModeForSurfaces.add(mMirrorMode);
604         }
605         mReadoutTimestampEnabled = false;
606         mIsReadoutSensorTimestampBase = false;
607         mUsage = 0;
608     }
609 
610     /**
611      * Create a list of {@link OutputConfiguration} instances for the outputs used by a
612      * {@link android.hardware.camera2.MultiResolutionImageReader}.
613      *
614      * <p>This constructor takes an argument for a
615      * {@link android.hardware.camera2.MultiResolutionImageReader}.</p>
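     *
     * <p>A minimal sketch, assuming {@code multiResolutionImageReader}, {@code cameraDevice},
     * {@code stateCallback}, and {@code handler} are provided by the application:</p>
     * <pre>{@code
     * Collection<OutputConfiguration> configs =
     *         OutputConfiguration.createInstancesForMultiResolutionOutput(
     *                 multiResolutionImageReader);
     * cameraDevice.createCaptureSessionByOutputConfigurations(
     *         new ArrayList<>(configs), stateCallback, handler);
     * }</pre>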
616      *
617      * @param multiResolutionImageReader
618      *          The multi-resolution image reader object.
619      */
620     public static @NonNull Collection<OutputConfiguration> createInstancesForMultiResolutionOutput(
621             @NonNull MultiResolutionImageReader multiResolutionImageReader)  {
622         checkNotNull(multiResolutionImageReader, "Multi-resolution image reader must not be null");
623 
624         int groupId = getAndIncreaseMultiResolutionGroupId();
625         ImageReader[] imageReaders = multiResolutionImageReader.getReaders();
626         ArrayList<OutputConfiguration> configs = new ArrayList<OutputConfiguration>();
627         for (int i = 0; i < imageReaders.length; i++) {
628             MultiResolutionStreamInfo streamInfo =
629                     multiResolutionImageReader.getStreamInfoForImageReader(imageReaders[i]);
630 
631             OutputConfiguration config = new OutputConfiguration(
632                     groupId, imageReaders[i].getSurface());
633             config.setPhysicalCameraId(streamInfo.getPhysicalCameraId());
634             config.setMultiResolutionOutput();
635             configs.add(config);
636 
637             // No need to call addSensorPixelModeUsed for ultra high resolution sensor camera,
638             // because regular and max resolution output configurations are used for DEFAULT mode
639             // and MAX_RESOLUTION mode respectively by default.
640         }
641 
642         return configs;
643     }
644 
645     /**
646      * Create a list of {@link OutputConfiguration} instances for a
647      * {@link MultiResolutionImageReader}.
648      *
649      * <p>This method can be used to create OutputConfigurations for a
650      * MultiResolutionImageReader that can be included in a SessionConfiguration passed into
651      * {@link
652      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#isSessionConfigurationSupported}
653      * before opening and setting up a camera device in full, at which point {@link
654      * #setSurfacesForMultiResolutionOutput} can be used to link to the actual
655      * MultiResolutionImageReader.</p>
656      *
657      * <p>This method takes the same arguments used to create a {@link
658      * MultiResolutionImageReader}: a collection of {@link MultiResolutionStreamInfo}
659      * objects and the format.</p>
660      *
661      * @param streams The group of multi-resolution stream info objects, which are used to create a
662      *                multi-resolution image reader containing a number of ImageReaders.
663      * @param format The format of the MultiResolutionImageReader. This must be one of the {@link
664      *               android.graphics.ImageFormat} or {@link android.graphics.PixelFormat} constants
665      *               supported by the camera device. Note that not all formats are supported, like
666      *               {@link ImageFormat#NV21}. The supported multi-resolution reader format can be
667      *               queried by {@link MultiResolutionStreamConfigurationMap#getOutputFormats}.
668      *
669      * @return The list of {@link OutputConfiguration} objects for a MultiResolutionImageReader.
670      *
671      * @throws IllegalArgumentException If {@code streams} is null or doesn't contain
672      *                                 at least 2 items, or if {@code format} isn't a valid camera
673      *                                 format.
674      *
675      * @see MultiResolutionImageReader
676      * @see MultiResolutionStreamInfo
677      */
678     @FlaggedApi(Flags.FLAG_CAMERA_DEVICE_SETUP)
679     public static @NonNull List<OutputConfiguration> createInstancesForMultiResolutionOutput(
680             @NonNull Collection<MultiResolutionStreamInfo> streams,
681             @Format int format)  {
682         if (streams == null || streams.size() <= 1) {
683             throw new IllegalArgumentException(
684                 "The streams list must contain at least 2 entries");
685         }
686         if (format == ImageFormat.NV21) {
687             throw new IllegalArgumentException(
688                     "NV21 format is not supported");
689         }
690 
691         int groupId = getAndIncreaseMultiResolutionGroupId();
692         ArrayList<OutputConfiguration> configs = new ArrayList<OutputConfiguration>();
693         for (MultiResolutionStreamInfo stream : streams) {
694             Size surfaceSize = new Size(stream.getWidth(), stream.getHeight());
695             OutputConfiguration config = new OutputConfiguration(
696                     groupId, format, surfaceSize);
697             config.setPhysicalCameraId(stream.getPhysicalCameraId());
698             config.setMultiResolutionOutput();
699             configs.add(config);
700 
701             // No need to call addSensorPixelModeUsed for ultra high resolution sensor camera,
702             // because regular and max resolution output configurations are used for DEFAULT mode
703             // and MAX_RESOLUTION mode respectively by default.
704         }
705 
706         return configs;
707     }
708 
709     /**
710      * Set the OutputConfiguration surfaces corresponding to the {@link MultiResolutionImageReader}.
711      *
712      * <p>This function should be used together with {@link
713      * #createInstancesForMultiResolutionOutput}. The application calls {@link
714      * #createInstancesForMultiResolutionOutput} first to create a list of
715      * OutputConfiguration objects without the actual MultiResolutionImageReader.
716      * Once the MultiResolutionImageReader is created later during full camera setup, the
717      * application then calls this function to assign the surfaces to the OutputConfiguration
718      * instances.</p>
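     *
     * <p>A minimal sketch of this two-step flow, assuming {@code streamInfos} is the collection
     * of {@link MultiResolutionStreamInfo} the application intends to use and {@code maxImages}
     * is the desired image queue depth:</p>
     * <pre>{@code
     * // Step 1: create configurations without surfaces to query support before full setup.
     * List<OutputConfiguration> configs =
     *         OutputConfiguration.createInstancesForMultiResolutionOutput(
     *                 streamInfos, ImageFormat.JPEG);
     *
     * // Step 2: once the MultiResolutionImageReader exists, attach its surfaces.
     * MultiResolutionImageReader reader =
     *         new MultiResolutionImageReader(streamInfos, ImageFormat.JPEG, maxImages);
     * OutputConfiguration.setSurfacesForMultiResolutionOutput(configs, reader);
     * }</pre>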
719      *
720      * @param outputConfigurations The OutputConfiguration objects created by {@link
721      *                             #createInstancesForMultiResolutionOutput}
722      * @param multiResolutionImageReader The MultiResolutionImageReader object created from the same
723      *                                   MultiResolutionStreamInfo parameters as
724      *                                   {@code outputConfigurations}.
725      * @throws IllegalArgumentException If {@code outputConfigurations} or {@code
726      *                                  multiResolutionImageReader} is {@code null}, the {@code
727      *                                  outputConfigurations} and {@code multiResolutionImageReader}
728      *                                  sizes don't match, or if the
729      *                                  {@code multiResolutionImageReader}'s surfaces don't match
730      *                                  with the {@code outputConfigurations}.
731      * @throws IllegalStateException If {@code outputConfigurations} already contains valid output
732      *                               surfaces.
733      */
734     @FlaggedApi(Flags.FLAG_CAMERA_DEVICE_SETUP)
735     public static void setSurfacesForMultiResolutionOutput(
736             @NonNull Collection<OutputConfiguration> outputConfigurations,
737             @NonNull MultiResolutionImageReader multiResolutionImageReader) {
738         checkNotNull(outputConfigurations, "outputConfigurations must not be null");
739         checkNotNull(multiResolutionImageReader, "multiResolutionImageReader must not be null");
740         if (outputConfigurations.size() != multiResolutionImageReader.getReaders().length) {
741             throw new IllegalArgumentException(
742                     "outputConfigurations and multiResolutionImageReader sizes must match");
743         }
744 
745         for (OutputConfiguration config : outputConfigurations) {
746             String physicalCameraId = config.getPhysicalCameraId();
747             if (physicalCameraId == null) {
748                 physicalCameraId = "";
749             }
750             Surface surface = multiResolutionImageReader.getSurface(config.getConfiguredSize(),
751                     physicalCameraId);
752             config.addSurface(surface);
753         }
754     }
755 
756     /**
757      * Create a new {@link OutputConfiguration} instance, with desired Surface size and Surface
758      * source class.
759      * <p>
760      * This constructor takes an argument for desired Surface size and the Surface source class
761      * without providing the actual output Surface. This is used to set up an output configuration
762      * with a deferred Surface. The application can use this output configuration to create a
763      * session.
764      * </p>
765      *
766      * <p>Starting from {@link android.os.Build.VERSION_CODES#VANILLA_ICE_CREAM Android V},
767      * the deferred Surface can be obtained: (1) from {@link android.view.SurfaceView}
768      * by calling {@link android.view.SurfaceHolder#getSurface}, (2) from
769      * {@link android.graphics.SurfaceTexture} via
770      * {@link android.view.Surface#Surface(android.graphics.SurfaceTexture)}, (3) from
771      * {@link android.media.MediaRecorder} via {@link android.media.MediaRecorder#getSurface} or
772      * {@link android.media.MediaCodec#createPersistentInputSurface}, or (4) from
773      * {@link android.media.MediaCodec} via {@link android.media.MediaCodec#createInputSurface} or
774      * {@link android.media.MediaCodec#createPersistentInputSurface}.</p>
775      *
776      * <ul>
777      * <li>Surfaces for {@link android.view.SurfaceView} and {@link android.graphics.SurfaceTexture}
778      * can be deferred until after {@link CameraDevice#createCaptureSession}. In that case, the
779      * output Surface must be set via {@link #addSurface}, and the Surface configuration must be
780      * finalized via {@link CameraCaptureSession#finalizeOutputConfigurations} before submitting
781      * a request with the Surface target.</li>
782      * <li>For all other target types, the output Surface must be set by {@link #addSurface},
783      * and {@link CameraCaptureSession#finalizeOutputConfigurations} is not needed because the
784      * OutputConfiguration used to create the session will contain the actual Surface.</li>
785      * </ul>
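     *
     * <p>A minimal sketch of the deferred {@link android.view.SurfaceView} case described in the
     * list above, assuming {@code previewSize} matches the SurfaceView size, {@code surfaceView}
     * is the application's SurfaceView, and {@code session} is the capture session created with
     * {@code deferredConfig}:</p>
     * <pre>{@code
     * OutputConfiguration deferredConfig =
     *         new OutputConfiguration(previewSize, SurfaceHolder.class);
     * // ... create the capture session with deferredConfig ...
     * // Once the SurfaceView's Surface is ready:
     * deferredConfig.addSurface(surfaceView.getHolder().getSurface());
     * session.finalizeOutputConfigurations(Arrays.asList(deferredConfig));
     * }</pre>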
786      *
787      * <p>Before {@link android.os.Build.VERSION_CODES#VANILLA_ICE_CREAM Android V}, only {@link
788      * android.view.SurfaceView} and {@link android.graphics.SurfaceTexture} are supported. Both
789      * kinds of outputs can be deferred until after {@link
790      * CameraDevice#createCaptureSessionByOutputConfigurations}.</p>
791      *
792      * <p>An OutputConfiguration object created by this constructor can be used for {@link
793      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#isSessionConfigurationSupported}
794      * and {@link
795      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#getSessionCharacteristics} without
796      * having called {@link #addSurface}.</p>
797      *
798      * @param surfaceSize Size for the deferred surface.
799      * @param klass a non-{@code null} {@link Class} object reference that indicates the source of
800      *            this surface. Only {@link android.view.SurfaceHolder SurfaceHolder.class},
801      *            {@link android.graphics.SurfaceTexture SurfaceTexture.class}, {@link
802      *            android.media.MediaRecorder MediaRecorder.class}, and
803      *            {@link android.media.MediaCodec MediaCodec.class} are supported.
804      *            Before {@link android.os.Build.VERSION_CODES#VANILLA_ICE_CREAM Android V}, only
805      *            {@link android.view.SurfaceHolder SurfaceHolder.class} and {@link
806      *            android.graphics.SurfaceTexture SurfaceTexture.class} are supported.
807      * @throws IllegalArgumentException if the Surface source class is not supported, or Surface
808      *         size is zero.
809      */
810     public <T> OutputConfiguration(@NonNull Size surfaceSize, @NonNull Class<T> klass) {
811         checkNotNull(surfaceSize, "surfaceSize must not be null");
812         checkNotNull(klass, "klass must not be null");
813         if (klass == android.view.SurfaceHolder.class) {
814             mSurfaceType = SURFACE_TYPE_SURFACE_VIEW;
815             mIsDeferredConfig = true;
816         } else if (klass == android.graphics.SurfaceTexture.class) {
817             mSurfaceType = SURFACE_TYPE_SURFACE_TEXTURE;
818             mIsDeferredConfig = true;
819         } else if (klass == android.media.MediaRecorder.class) {
820             mSurfaceType = SURFACE_TYPE_MEDIA_RECORDER;
821             mIsDeferredConfig = false;
822         } else if (klass == android.media.MediaCodec.class) {
823             mSurfaceType = SURFACE_TYPE_MEDIA_CODEC;
824             mIsDeferredConfig = false;
825         } else {
826             mSurfaceType = SURFACE_TYPE_UNKNOWN;
827             throw new IllegalArgumentException("Unknown surface source class type");
828         }
829 
830         if (surfaceSize.getWidth() == 0 || surfaceSize.getHeight() == 0) {
831             throw new IllegalArgumentException("Surface size needs to be non-zero");
832         }
833 
834         mSurfaceGroupId = SURFACE_GROUP_ID_NONE;
835         mSurfaces = new ArrayList<Surface>();
836         mMirrorModeForSurfaces = new IntArray();
837         mRotation = ROTATION_0;
838         mConfiguredSize = surfaceSize;
839         mConfiguredFormat = StreamConfigurationMap.imageFormatToInternal(ImageFormat.PRIVATE);
840         mConfiguredDataspace = StreamConfigurationMap.imageFormatToDataspace(ImageFormat.PRIVATE);
841         mConfiguredGenerationId = 0;
842         mIsShared = false;
843         mPhysicalCameraId = null;
844         mIsMultiResolution = false;
845         mSensorPixelModesUsed = new ArrayList<Integer>();
846         mDynamicRangeProfile = DynamicRangeProfiles.STANDARD;
847         mColorSpace = ColorSpaceProfiles.UNSPECIFIED;
848         mStreamUseCase = CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
849         mReadoutTimestampEnabled = false;
850         mIsReadoutSensorTimestampBase = false;
851         mUsage = 0;
852     }
853 
854     /**
855      * Create a new {@link OutputConfiguration} instance for an {@link ImageReader} for a given
856      * format and size.
857      *
858      * <p>This constructor creates an OutputConfiguration for an ImageReader without providing
859      * the actual output Surface. The actual output Surface must be set via {@link #addSurface}
860      * before creating the capture session.</p>
861      *
862      * <p>An OutputConfiguration object created by this constructor can be used for {@link
863      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#isSessionConfigurationSupported}
864      * and {@link
865      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#getSessionCharacteristics} without
866      * having called {@link #addSurface}.</p>
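     *
     * <p>A minimal sketch, assuming {@code imageReader} is created later with a matching format
     * and size:</p>
     * <pre>{@code
     * OutputConfiguration readerConfig =
     *         new OutputConfiguration(ImageFormat.JPEG, new Size(1920, 1080));
     * // ... use readerConfig to query session support via CameraDevice.CameraDeviceSetup ...
     * // Once the ImageReader exists:
     * readerConfig.addSurface(imageReader.getSurface());
     * }</pre>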
867      *
868      * @param format The format of the ImageReader output. This must be one of the
869      *               {@link android.graphics.ImageFormat} or {@link android.graphics.PixelFormat}
870      *               constants. Note that not all formats are supported by the camera device.
871      * @param surfaceSize Size for the ImageReader surface.
872      * @throws IllegalArgumentException if the Surface size is null or zero.
873      */
874     @FlaggedApi(Flags.FLAG_CAMERA_DEVICE_SETUP)
875     public OutputConfiguration(@Format int format, @NonNull Size surfaceSize) {
876         this(format, surfaceSize,
877                 format == ImageFormat.PRIVATE ? 0 : HardwareBuffer.USAGE_CPU_READ_OFTEN);
878     }
879 
880     /**
881      * Create a new {@link OutputConfiguration} instance for an {@link ImageReader} for a given
882      * surfaceGroupId, format, and size.
883      *
884      * <p>This constructor creates an OutputConfiguration for an ImageReader without providing
885      * the actual output Surface. The actual output Surface must be set via {@link #addSurface}
886      * before creating the capture session.</p>
887      *
888      * <p>An OutputConfiguration object created by this constructor can be used for {@link
889      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#isSessionConfigurationSupported}
890      * and {@link
891      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#getSessionCharacteristics} without
892      * having called {@link #addSurface}.</p>
893      *
894      * @param surfaceGroupId A group ID for this output, used for sharing memory between multiple
895      *                       outputs.
896      * @param format The format of the ImageReader output. This must be one of the
897      *               {@link android.graphics.ImageFormat} or {@link android.graphics.PixelFormat}
898      *               constants. Note that not all formats are supported by the camera device.
899      * @param surfaceSize Size for the ImageReader surface.
900      * @throws IllegalArgumentException if the Surface size is null or zero.
901      */
902     @FlaggedApi(Flags.FLAG_CAMERA_DEVICE_SETUP)
903     public OutputConfiguration(int surfaceGroupId, @Format int format, @NonNull Size surfaceSize) {
904         this(surfaceGroupId, format, surfaceSize,
905                 format == ImageFormat.PRIVATE ? 0 : HardwareBuffer.USAGE_CPU_READ_OFTEN);
906     }
907 
908     /**
909      * Create a new {@link OutputConfiguration} instance for an {@link ImageReader} for a given
910      * format, size, and usage flags.
911      *
912      * <p>This constructor creates an OutputConfiguration for an ImageReader without providing
913      * the actual output Surface. The actual output Surface must be set via {@link #addSurface}
914      * before creating the capture session.</p>
915      *
916      * <p>An OutputConfiguration object created by this constructor can be used for {@link
917      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#isSessionConfigurationSupported}
918      * and {@link
919      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#getSessionCharacteristics} without
920      * having called {@link #addSurface}.</p>
921      *
922      * @param format The format of the ImageReader output. This must be one of the
923      *               {@link android.graphics.ImageFormat} or {@link android.graphics.PixelFormat}
924      *               constants. Note that not all formats are supported by the camera device.
925      * @param surfaceSize Size for the ImageReader surface.
926      * @param usage The usage flags of the ImageReader output surface.
927      * @throws IllegalArgumentException if the Surface size is null or zero.
928      */
929     @FlaggedApi(Flags.FLAG_CAMERA_DEVICE_SETUP)
930     public OutputConfiguration(@Format int format, @NonNull Size surfaceSize, @Usage long usage) {
931         this(SURFACE_GROUP_ID_NONE, format, surfaceSize, usage);
932     }
933 
934     /**
935      * Create a new {@link OutputConfiguration} instance for an {@link ImageReader} for a given
936      * surface group id, format, size, and usage flags.
937      *
938      * <p>This constructor creates an OutputConfiguration for an ImageReader without providing
939      * the actual output Surface. The actual output Surface must be set via {@link #addSurface}
940      * before creating the capture session.</p>
941      *
942      * <p>An OutputConfiguration object created by this constructor can be used for {@link
943      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#isSessionConfigurationSupported}
944      * and {@link
945      * android.hardware.camera2.CameraDevice.CameraDeviceSetup#getSessionCharacteristics} without
946      * having called {@link #addSurface}.</p>
947      *
948      * @param surfaceGroupId A group ID for this output, used for sharing memory between multiple
949      *                       outputs.
950      * @param format The format of the ImageReader output. This must be one of the
951      *               {@link android.graphics.ImageFormat} or {@link android.graphics.PixelFormat}
952      *               constants. Note that not all formats are supported by the camera device.
953      * @param surfaceSize Size for the ImageReader surface.
954      * @param usage The usage flags of the ImageReader output surface.
955      * @throws IllegalArgumentException if the Surface size is null or zero.
956      */
957     @FlaggedApi(Flags.FLAG_CAMERA_DEVICE_SETUP)
958     public OutputConfiguration(int surfaceGroupId, @Format int format,
959             @NonNull Size surfaceSize, @Usage long usage) {
960         checkNotNull(surfaceSize, "surfaceSize must not be null");
961         if (surfaceSize.getWidth() == 0 || surfaceSize.getHeight() == 0) {
962             throw new IllegalArgumentException("Surface size needs to be non-zero");
963         }
964 
965         mSurfaceType = SURFACE_TYPE_IMAGE_READER;
966         mSurfaceGroupId = surfaceGroupId;
967         mSurfaces = new ArrayList<Surface>();
968         mRotation = ROTATION_0;
969         mConfiguredSize = surfaceSize;
970         mConfiguredFormat = StreamConfigurationMap.imageFormatToInternal(format);
971         mConfiguredDataspace = StreamConfigurationMap.imageFormatToDataspace(format);
972         mConfiguredGenerationId = 0;
973         mIsDeferredConfig = false;
974         mIsShared = false;
975         mPhysicalCameraId = null;
976         mIsMultiResolution = false;
977         mSensorPixelModesUsed = new ArrayList<Integer>();
978         mDynamicRangeProfile = DynamicRangeProfiles.STANDARD;
979         mColorSpace = ColorSpaceProfiles.UNSPECIFIED;
980         mStreamUseCase = CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
981         mTimestampBase = TIMESTAMP_BASE_DEFAULT;
982         mMirrorMode = MIRROR_MODE_AUTO;
983         mMirrorModeForSurfaces = new IntArray();
984         mReadoutTimestampEnabled = false;
985         mIsReadoutSensorTimestampBase = false;
986         mUsage = usage;
987     }
988 
989     /**
990      * Enable multiple surfaces sharing the same OutputConfiguration
991      *
992      * <p>For advanced use cases, a camera application may require more streams than the combination
993      * guaranteed by {@link CameraDevice#createCaptureSession}. In this case, more than one
994      * compatible surface can be attached to an OutputConfiguration so that they map to one
995      * camera stream, and the outputs share memory buffers when possible. Due to buffer sharing,
996      * clients should be careful when adding surface outputs that modify their input data. If such
997      * a case exists, camera clients should have an additional mechanism to synchronize read and write
998      * access between individual consumers.</p>
999      *
1000      * <p>Two surfaces are compatible in the below cases:</p>
1001      * <ol>
1002      * <li> Surfaces with the same size, format, dataSpace, and Surface source class. In this case,
1003      * {@link CameraDevice#createCaptureSessionByOutputConfigurations} is guaranteed to succeed.
1004      *
1005      * <li> Surfaces with the same size, format, and dataSpace, but different Surface source classes
1006      * that are generally not compatible. However, on some devices, the underlying camera device is
1007      * able to use the same buffer layout for both surfaces. The only way to discover if this is the
1008      * case is to create a capture session with that output configuration. For example, if the
1009      * camera device uses the same private buffer format between a SurfaceView/SurfaceTexture and a
1010      * MediaRecorder/MediaCodec, {@link CameraDevice#createCaptureSessionByOutputConfigurations}
1011      * will succeed. Otherwise, it fails with {@link
1012      * CameraCaptureSession.StateCallback#onConfigureFailed}.
1013      * </ol>
1014      *
1015      * <p>To enable surface sharing, this function must be called before {@link
1016      * CameraDevice#createCaptureSessionByOutputConfigurations} or {@link
1017      * CameraDevice#createReprocessableCaptureSessionByConfigurations}. Calling this function after
1018      * {@link CameraDevice#createCaptureSessionByOutputConfigurations} has no effect.</p>
1019      *
1020      * <p>Up to {@link #getMaxSharedSurfaceCount} surfaces can be shared for an OutputConfiguration.
1021      * The supported surfaces for sharing must be of type SurfaceTexture, SurfaceView,
1022      * MediaRecorder, MediaCodec, or implementation defined ImageReader.</p>
1023      *
1024      * <p>This function must not be called from OutputConfigurations created by {@link
1025      * #createInstancesForMultiResolutionOutput}.</p>
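     *
     * <p>A minimal sketch, assuming {@code surfaceA} and {@code surfaceB} are compatible as
     * described above and {@code session} is the active {@link CameraCaptureSession} created
     * with {@code sharedConfig}:</p>
     * <pre>{@code
     * OutputConfiguration sharedConfig = new OutputConfiguration(surfaceA);
     * sharedConfig.enableSurfaceSharing();
     * // ... create the capture session with sharedConfig ...
     * sharedConfig.addSurface(surfaceB);
     * session.updateOutputConfiguration(sharedConfig);
     * }</pre>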
1026      *
1027      * @throws IllegalStateException If this OutputConfiguration is created via {@link
1028      * #createInstancesForMultiResolutionOutput} to back a MultiResolutionImageReader.
1029      */
1030     public void enableSurfaceSharing() {
1031         if (mIsMultiResolution) {
1032             throw new IllegalStateException("Cannot enable surface sharing on "
1033                     + "multi-resolution output configurations");
1034         }
1035         mIsShared = true;
1036     }
1037 
1038     /**
1039      * Set the id of the physical camera for this OutputConfiguration
1040      *
1041      * <p>In the case that one logical camera is made up of multiple physical cameras, it could be
1042      * desirable for the camera application to request streams from individual physical cameras.
1043      * This call achieves that by mapping the OutputConfiguration to the physical camera ID.</p>
1044      *
1045      * <p>The valid physical camera ids can be queried by {@link
1046      * CameraCharacteristics#getPhysicalCameraIds}.</p>
1047      *
1048      * <p>Passing in a null physicalCameraId means that the OutputConfiguration is for a logical
1049      * stream.</p>
1050      *
1051      * <p>This function must be called before {@link
1052      * CameraDevice#createCaptureSessionByOutputConfigurations} or {@link
1053      * CameraDevice#createReprocessableCaptureSessionByConfigurations}. Calling this function
1054      * after {@link CameraDevice#createCaptureSessionByOutputConfigurations} or {@link
1055      * CameraDevice#createReprocessableCaptureSessionByConfigurations} has no effect.</p>
1056      *
1057      * <p>As of {@link android.os.Build.VERSION_CODES#S Android 12}, an image buffer from a
1058      * physical camera stream can be used for reprocessing to logical camera streams and streams
1059      * from the same physical camera if the camera device supports multi-resolution input and output
1060      * streams. See {@link CameraCharacteristics#SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP}
1061      * for details. The behaviors of reprocessing from a non-physical camera stream to a physical
1062      * camera stream, and from a physical camera stream to a physical camera stream of a different
1063      * physical camera, are device-specific and not guaranteed to be supported.</p>
1064      *
1065      * <p>On prior API levels, the surface belonging to a physical camera OutputConfiguration must
1066      * not be used as input or output of a reprocessing request. </p>
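     *
     * <p>For example, to stream concurrently from two physical cameras of a logical multi-camera,
     * one OutputConfiguration can be created per physical stream. The physical ids {@code "2"} and
     * {@code "3"} below are illustrative and would normally come from
     * {@link CameraCharacteristics#getPhysicalCameraIds}, as are the surface, device, callback,
     * and handler names:</p>
     *
     * <pre>{@code
     * OutputConfiguration wideConfig = new OutputConfiguration(wideSurface);
     * wideConfig.setPhysicalCameraId("2");
     * OutputConfiguration teleConfig = new OutputConfiguration(teleSurface);
     * teleConfig.setPhysicalCameraId("3");
     * device.createCaptureSessionByOutputConfigurations(
     *         Arrays.asList(wideConfig, teleConfig), stateCallback, handler);
     * }</pre>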
1067      */
1068     public void setPhysicalCameraId(@Nullable String physicalCameraId) {
1069         mPhysicalCameraId = physicalCameraId;
1070     }
1071 
1072     /**
1073      * Add a sensor pixel mode that this OutputConfiguration will be used in.
1074      *
1075      * <p> In the case that this output stream configuration (format, width, height) is
1076      * available in both the
1077      * {@link android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP} and
1078      * {@link android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION}
1079      * configurations, the camera sub-system will assume that this {@link OutputConfiguration} will
1080      * be used only with {@link android.hardware.camera2.CaptureRequest}s which have
1081      * {@link android.hardware.camera2.CaptureRequest#SENSOR_PIXEL_MODE} set to
1082      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT}.
1083      * In such cases, if clients intend to use the
1084      * {@link OutputConfiguration}(s) in a {@link android.hardware.camera2.CaptureRequest} with
1085      * other sensor pixel modes, they must specify which
1086      * {@link android.hardware.camera2.CaptureRequest#SENSOR_PIXEL_MODE}(s) they will use this
1087      * {@link OutputConfiguration} with, by calling this method.
1088      *
1089      * In case this output stream configuration (format, width, height) is only listed in the
1090      * {@link android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION}
1091      * configurations, this output target must only be used with
1092      * {@link android.hardware.camera2.CaptureRequest}s which have
1093      * {@link android.hardware.camera2.CaptureRequest#SENSOR_PIXEL_MODE} set to
1094      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}, and that
1095      * is what the camera sub-system will assume. If clients add
1096      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT} in this
1097      * case, the session configuration will fail if this {@link OutputConfiguration} is included.
1098      *
1099      * In case this output stream configuration (format, width, height) is only listed in the
1100      * {@link android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP}
1101      * configurations, this output target must only be used with
1102      * {@link android.hardware.camera2.CaptureRequest}s which have
1103      * {@link android.hardware.camera2.CaptureRequest#SENSOR_PIXEL_MODE} set to
1104      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT}, and that is what
1105      * the camera sub-system will assume. If clients add
1106      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION} in this
1107      * case, the session configuration will fail if this {@link OutputConfiguration} is included.
1108      *
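     * <p>For example, assuming a hypothetical {@code rawSurface} whose stream configuration
     * (format, width, height) appears in both the default and the maximum-resolution stream
     * configuration maps, and which the application intends to target with requests using either
     * sensor pixel mode, both modes can be declared up front:</p>
     *
     * <pre>{@code
     * OutputConfiguration rawConfig = new OutputConfiguration(rawSurface);
     * rawConfig.addSensorPixelModeUsed(CameraMetadata.SENSOR_PIXEL_MODE_DEFAULT);
     * rawConfig.addSensorPixelModeUsed(CameraMetadata.SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
     * }</pre>
     *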
1109      * @param sensorPixelModeUsed The sensor pixel mode this OutputConfiguration will be used with
1110      *
1112      */
1113     public void addSensorPixelModeUsed(@SensorPixelMode int sensorPixelModeUsed) {
1114         // Verify that the values are in range.
1115         if (sensorPixelModeUsed != CameraMetadata.SENSOR_PIXEL_MODE_DEFAULT &&
1116                 sensorPixelModeUsed != CameraMetadata.SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) {
1117             throw new IllegalArgumentException("Not a valid sensor pixel mode " +
1118                     sensorPixelModeUsed);
1119         }
1120 
1121         if (mSensorPixelModesUsed.contains(sensorPixelModeUsed)) {
1122             // Already added, ignore;
1123             return;
1124         }
1125         mSensorPixelModesUsed.add(sensorPixelModeUsed);
1126     }
1127 
1128     /**
1129      * Remove a sensor pixel mode, previously added through addSensorPixelModeUsed, from this
1130      * OutputConfiguration.
1131      *
1132      * <p> Sensor pixel modes added via calls to {@link #addSensorPixelModeUsed} can also be removed
1133      * from the OutputConfiguration.</p>
1134      *
1135      * @param sensorPixelModeUsed The sensor pixel mode to be removed.
1136      *
1137      * @throws IllegalArgumentException If the sensor pixel mode wasn't previously added
1138      *                                  through {@link #addSensorPixelModeUsed}.
1139      */
1140     public void removeSensorPixelModeUsed(@SensorPixelMode int sensorPixelModeUsed) {
1141         if (!mSensorPixelModesUsed.remove(Integer.valueOf(sensorPixelModeUsed))) {
1142             throw new IllegalArgumentException("sensorPixelMode " + sensorPixelModeUsed +
1143                     " is not part of this output configuration");
1144         }
1145     }
1146 
1147     /**
1148      * Check if this configuration is for a physical camera.
1149      *
1150      * <p>This returns true if the output configuration targets a physical camera of a
1151      * logical multi-camera, as set via {@link OutputConfiguration#setPhysicalCameraId}.</p>
1152      * @hide
1153      */
1154     public boolean isForPhysicalCamera() {
1155         return (mPhysicalCameraId != null);
1156     }
1157 
1158     /**
1159      * Check if this configuration has deferred configuration.
1160      *
1161      * <p>This will return true if the output configuration was constructed with {@link
1162      * android.view.SurfaceView} or {@link android.graphics.SurfaceTexture} deferred by
1163      * {@link OutputConfiguration#OutputConfiguration(Size, Class)}. It will return true even after
1164      * the deferred surface is added later by {@link OutputConfiguration#addSurface}.</p>
1165      *
1166      * @return true if this configuration has deferred surface.
1167      * @hide
1168      */
1169     public boolean isDeferredConfiguration() {
1170         return mIsDeferredConfig;
1171     }
1172 
1173     /**
1174      * Add a surface to this OutputConfiguration.
1175      *
1176      * <p> This function can be called before or after {@link
1177      * CameraDevice#createCaptureSessionByOutputConfigurations}. If it's called after,
1178      * the application must finalize the capture session with
1179      * {@link CameraCaptureSession#finalizeOutputConfigurations}. It is possible to call this method
1180      * after the output configurations have been finalized only if surface sharing is enabled;
1181      * see {@link #enableSurfaceSharing}. The modified output configuration must be updated with
1182      * {@link CameraCaptureSession#updateOutputConfiguration}. If this function is called before
1183      * session creation, {@link CameraCaptureSession#finalizeOutputConfigurations} doesn't need to
1184      * be called.</p>
1185      *
1186      * <p> If the OutputConfiguration was constructed by {@link
1187      * OutputConfiguration#OutputConfiguration(Size, Class)}, the added surface must be obtained:
1188      * <ul>
1189      * <li>from {@link android.view.SurfaceView} by calling
1190      * {@link android.view.SurfaceHolder#getSurface}</li>
1191      * <li>from {@link android.graphics.SurfaceTexture} by calling
1192      * {@link android.view.Surface#Surface(android.graphics.SurfaceTexture)}</li>
1193      * <li>from {@link android.media.MediaRecorder} by calling
1194      * {@link android.media.MediaRecorder#getSurface} or {@link
1195      * android.media.MediaCodec#createPersistentInputSurface}</li>
1196      * <li>from {@link android.media.MediaCodec} by calling
1197      * {@link android.media.MediaCodec#createInputSurface} or
1198      * {@link android.media.MediaCodec#createPersistentInputSurface()}</li>
1199      * </ul>
1200      *
1201      * <p> If the OutputConfiguration was constructed by {@link #OutputConfiguration(int, Size)}
1202      * or its variants, the added surface must be obtained from {@link android.media.ImageReader}
1203      * by calling {@link android.media.ImageReader#getSurface}.</p>
1204      *
1205      * <p> If the OutputConfiguration was constructed by other constructors, the added
1206      * surface must be compatible with the existing surface. See {@link #enableSurfaceSharing} for
1207      * details of compatible surfaces.</p>
1208      *
1209      * <p> If the OutputConfiguration already contains a Surface, {@link #enableSurfaceSharing} must
1210      * be called before calling this function to add a new Surface.</p>
1211      *
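     * <p>For example, in the deferred case, assuming an OutputConfiguration created from a preview
     * {@code previewSize} and {@link android.view.SurfaceHolder SurfaceHolder.class} (the size,
     * device, session, callback, handler, and SurfaceView names below are illustrative):</p>
     *
     * <pre>{@code
     * OutputConfiguration deferredConfig =
     *         new OutputConfiguration(previewSize, SurfaceHolder.class);
     * device.createCaptureSessionByOutputConfigurations(
     *         Arrays.asList(deferredConfig), stateCallback, handler);
     * // Later, once the SurfaceView's surface is available:
     * deferredConfig.addSurface(surfaceView.getHolder().getSurface());
     * session.finalizeOutputConfigurations(Arrays.asList(deferredConfig));
     * }</pre>
     *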
1212      * @param surface The surface to be added.
1213      * @throws IllegalArgumentException if the Surface is invalid, the Surface's
1214      *         dataspace/format doesn't match, or adding the Surface would exceed number of
1215      *         shared surfaces supported.
1216      * @throws IllegalStateException if the Surface was already added to this OutputConfiguration,
1217      *         or if the OutputConfiguration is not shared and it already has a surface associated
1218      *         with it.
1219      */
1220     public void addSurface(@NonNull Surface surface) {
1221         checkNotNull(surface, "Surface must not be null");
1222         if (mSurfaces.contains(surface)) {
1223             throw new IllegalStateException("Surface is already added!");
1224         }
1225         if (mSurfaces.size() == 1 && !mIsShared) {
1226             throw new IllegalStateException("Cannot have 2 surfaces for a non-sharing configuration");
1227         }
1228         if (mSurfaces.size() + 1 > MAX_SURFACES_COUNT) {
1229             throw new IllegalArgumentException("Exceeds maximum number of surfaces");
1230         }
1231 
1232         // This will throw IAE if the surface was abandoned.
1233         Size surfaceSize = SurfaceUtils.getSurfaceSize(surface);
1234         if (!surfaceSize.equals(mConfiguredSize)) {
1235             Log.w(TAG, "Added surface size " + surfaceSize +
1236                     " is different than pre-configured size " + mConfiguredSize +
1237                     ", the pre-configured size will be used.");
1238         }
1239 
1240         if (mConfiguredFormat != SurfaceUtils.getSurfaceFormat(surface)) {
1241             throw new IllegalArgumentException("The format of added surface format doesn't match");
1242         }
1243 
1244         // If the surface format is PRIVATE, do not enforce dataSpace because camera device may
1245         // override it.
1246         if (mConfiguredFormat != ImageFormat.PRIVATE &&
1247                 mConfiguredDataspace != SurfaceUtils.getSurfaceDataspace(surface)) {
1248             throw new IllegalArgumentException("The dataspace of added surface doesn't match");
1249         }
1250 
1251         mSurfaces.add(surface);
1252         if (Flags.mirrorModeSharedSurfaces()) {
1253             mMirrorModeForSurfaces.add(mMirrorMode);
1254         }
1255     }
1256 
1257     /**
1258      * Remove a surface from this OutputConfiguration.
1259      *
1260      * <p> Surfaces added via calls to {@link #addSurface} can also be removed from the
1261      *  OutputConfiguration. The only notable exception is the surface associated with
1262      *  the OutputConfiguration (see {@link #getSurface}) which was passed as part of the
1263      *  constructor or was added first in the case of
1264      *  {@link OutputConfiguration#OutputConfiguration(Size, Class)}, {@link
1265      *  OutputConfiguration#OutputConfiguration(int, Size)}, {@link
1266      *  OutputConfiguration#OutputConfiguration(int, Size, long)}, {@link
1267      *  OutputConfiguration#OutputConfiguration(int, int, Size)}, {@link
1268      *  OutputConfiguration#OutputConfiguration(int, int, Size, long)}.</p>
1269      *
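     * <p>For example, a surface previously added to a shared OutputConfiguration (illustrative
     * names below) can be detached from a running session as follows:</p>
     *
     * <pre>{@code
     * sharedConfig.removeSurface(analysisSurface);
     * session.updateOutputConfiguration(sharedConfig);
     * }</pre>
     *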
1270      * @param surface The surface to be removed.
1271      *
1272      * @throws IllegalArgumentException If the surface is associated with this OutputConfiguration
1273      *                                  (see {@link #getSurface}) or the surface didn't get added
1274      *                                  with {@link #addSurface}.
1275      */
1276     public void removeSurface(@NonNull Surface surface) {
1277         checkNotNull(surface, "Surface must not be null");
1278         if (getSurface() == surface) {
1279             throw new IllegalArgumentException(
1280                     "Cannot remove surface associated with this output configuration");
1281         }
1282 
1283         int surfaceIndex = mSurfaces.indexOf(surface);
1284         if (surfaceIndex == -1) {
1285             throw new IllegalArgumentException("Surface is not part of this output configuration");
1286         }
1287 
1288         mSurfaces.remove(surfaceIndex);
1289         if (Flags.mirrorModeSharedSurfaces()) {
1290             mMirrorModeForSurfaces.remove(surfaceIndex);
1291         }
1292     }
1293 
1294     /**
1295      * Set stream use case for this OutputConfiguration
1296      *
1297      * <p>Stream use case is used to describe the purpose of the stream, whether it's for live
1298      * preview, still image capture, video recording, or their combinations. This flag is useful
1299      * for scenarios where the immediate consumer target isn't sufficient to indicate the stream's
1300      * usage.</p>
1301      *
1302      * <p>The main difference between stream use case and capture intent is that the former
1303      * enables the camera device to optimize camera hardware and software pipelines based on user
1304      * scenarios for each stream, whereas the latter is mainly a hint to the camera to decide the
1305      * optimal 3A strategy that's applicable to the whole session. The camera device carries out
1306      * configurations such as selecting tuning parameters, choosing the camera sensor mode, and
1307      * constructing the image processing pipeline based on the streams' use cases. Capture intents are
1308      * then used to fine tune 3A behaviors such as adjusting AE/AF convergence speed, and capture
1309      * intents may change during the lifetime of a session. For example, for a session with a
1310      * PREVIEW_VIDEO_STILL use case stream and a STILL_CAPTURE use case stream, the capture intents
1311      * may be PREVIEW with fast 3A convergence speed and flash metering with automatic control for
1312      * live preview, STILL_CAPTURE with best 3A parameters for still photo capture, or VIDEO_RECORD
1313      * with slower 3A convergence speed for better video playback experience.</p>
1314      *
1315      * <p>The stream use cases supported by a camera device can be queried via
1316      * {@link android.hardware.camera2.CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES}.</p>
1317      *
1318      * <p>The mandatory stream combinations involving stream use cases can be found at {@link
1319      * android.hardware.camera2.CameraDevice#createCaptureSession}, as well as queried via
1320      * {@link android.hardware.camera2.params.MandatoryStreamCombination}. The application is
1321      * strongly recommended to select one of the guaranteed stream combinations where all streams'
1322      * use cases are set to non-DEFAULT values. If the application chooses a stream combination
1323      * not in the mandatory list, the camera device may ignore some use case flags due to
1324      * hardware constraints or implementation details.</p>
1325      *
1326      * <p>This function must be called before {@link CameraDevice#createCaptureSession} or {@link
1327      * CameraDevice#createCaptureSessionByOutputConfigurations}. Calling this function after
1328      * {@link CameraDevice#createCaptureSession} or
1329      * {@link CameraDevice#createCaptureSessionByOutputConfigurations} has no effect on the camera
1330      * session.</p>
1331      *
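     * <p>For example, a preview plus recording session could tag each output with its intended
     * use case, assuming {@code previewSurface} and {@code recorderSurface} (illustrative names)
     * and that both use cases are advertised in {@link
     * android.hardware.camera2.CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES}:</p>
     *
     * <pre>{@code
     * OutputConfiguration previewConfig = new OutputConfiguration(previewSurface);
     * previewConfig.setStreamUseCase(CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW);
     * OutputConfiguration recordConfig = new OutputConfiguration(recorderSurface);
     * recordConfig.setStreamUseCase(CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD);
     * }</pre>
     *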
1332      * @param streamUseCase The stream use case to be set.
1333      *
1334      * @throws IllegalArgumentException If the streamUseCase isn't within the range of valid
1335      *                                  values.
1336      */
1337     public void setStreamUseCase(@StreamUseCase long streamUseCase) {
1338         // Verify that the value is in range
1339         long maxUseCaseValue = CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW;
1340         if (streamUseCase > maxUseCaseValue &&
1341                 streamUseCase < CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START) {
1342             throw new IllegalArgumentException("Not a valid stream use case value " +
1343                     streamUseCase);
1344         }
1345 
1346         mStreamUseCase = streamUseCase;
1347     }
1348 
1349     /**
1350      * Get the current stream use case
1351      *
1352      * <p>If no {@link #setStreamUseCase} is called first, this function returns
1353      * {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT DEFAULT}.</p>
1354      *
1355      * @return the currently set stream use case
1356      */
1357     public long getStreamUseCase() {
1358         return mStreamUseCase;
1359     }
1360 
1361     /**
1362      * Set timestamp base for this output target
1363      *
1364      * <p>Timestamp base describes the time domain of images from this
1365      * camera output and its relationship with {@link
1366      * CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE}.</p>
1367      *
1368      * <p>If this function is not called, the timestamp base for this output
1369      * is {@link #TIMESTAMP_BASE_DEFAULT}, with which the camera device adjusts
1370      * timestamps based on the output target.</p>
1371      *
1372      * <p>See {@link #TIMESTAMP_BASE_DEFAULT}, {@link #TIMESTAMP_BASE_SENSOR},
1373      * and {@link #TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED} for details of each timestamp base.</p>
1374      *
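     * <p>For example, to keep unadjusted sensor timestamps for an {@link android.media.ImageReader}
     * output (the {@code imageReader} name below is illustrative):</p>
     *
     * <pre>{@code
     * OutputConfiguration readerConfig = new OutputConfiguration(imageReader.getSurface());
     * readerConfig.setTimestampBase(OutputConfiguration.TIMESTAMP_BASE_SENSOR);
     * }</pre>
     *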
1375      * @param timestampBase The timestamp base to be set.
1376      *
1377      * @throws IllegalArgumentException If the timestamp base isn't within the range of valid
1378      *                                  values.
1379      */
1380     public void setTimestampBase(@TimestampBase int timestampBase) {
1381         // Verify that the value is in range
1382         if (timestampBase < TIMESTAMP_BASE_DEFAULT ||
1383                 timestampBase > TIMESTAMP_BASE_READOUT_SENSOR) {
1384             throw new IllegalArgumentException("Not a valid timestamp base value " +
1385                     timestampBase);
1386         }
1387 
1388         if (timestampBase == TIMESTAMP_BASE_READOUT_SENSOR) {
1389             mTimestampBase = TIMESTAMP_BASE_SENSOR;
1390             mReadoutTimestampEnabled = true;
1391             mIsReadoutSensorTimestampBase = true;
1392         } else {
1393             mTimestampBase = timestampBase;
1394             mIsReadoutSensorTimestampBase = false;
1395         }
1396     }
1397 
1398     /**
1399      * Get the current timestamp base
1400      *
1401      * <p>If no {@link #setTimestampBase} is called first, this function returns
1402      * {@link #TIMESTAMP_BASE_DEFAULT}.</p>
1403      *
1404      * @return The currently set timestamp base
1405      */
1406     public @TimestampBase int getTimestampBase() {
1407         if (mIsReadoutSensorTimestampBase) {
1408             return TIMESTAMP_BASE_READOUT_SENSOR;
1409         } else {
1410             return mTimestampBase;
1411         }
1412     }
1413 
1414     /**
1415      * Set the mirroring mode for this output target
1416      *
1417      * <p>If this function is not called, the mirroring mode for this output is
1418      * {@link #MIRROR_MODE_AUTO}, with which the camera API will mirror the output images
1419      * horizontally for front facing camera.</p>
1420      *
1421      * <p>For efficiency, the mirror effect is applied as a transform flag, so it is only effective
1422      * in some outputs. It works automatically for SurfaceView and TextureView outputs. For manual
1423      * use of SurfaceTexture, it is reflected in the value of
1424      * {@link android.graphics.SurfaceTexture#getTransformMatrix}. For other end points, such as
1425      * ImageReader, MediaRecorder, or MediaCodec, the mirror mode has no effect. If mirroring is
1426      * needed for such outputs, the application needs to mirror the image buffers itself before
1427      * passing them onward.</p>
1428      *
1429      * <p>Starting from Android 16, this function sets the mirror modes for all of the output
1430      * surfaces contained within this OutputConfiguration. To set the mirror mode for a particular
1431      * output surface, the application can call {@link #setMirrorMode(Surface, int)}. Prior to
1432      * Android 16, this function is only applicable if surface sharing is not enabled.</p>
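     *
     * <p>For example, to disable the default front-camera mirroring for a preview output (the
     * {@code previewSurface} name below is illustrative):</p>
     *
     * <pre>{@code
     * OutputConfiguration previewConfig = new OutputConfiguration(previewSurface);
     * previewConfig.setMirrorMode(OutputConfiguration.MIRROR_MODE_NONE);
     * }</pre>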
1433      */
1434     public void setMirrorMode(@MirrorMode int mirrorMode) {
1435         // Verify that the value is in range
1436         if (mirrorMode < MIRROR_MODE_AUTO ||
1437                 mirrorMode > MIRROR_MODE_V) {
1438             throw new IllegalArgumentException("Not a valid mirror mode " + mirrorMode);
1439         }
1440         mMirrorMode = mirrorMode;
1441         for (int j = 0; j < mMirrorModeForSurfaces.size(); j++) {
1442             mMirrorModeForSurfaces.set(j, mirrorMode);
1443         }
1444     }
1445 
1446     /**
1447      * Get the current mirroring mode
1448      *
1449      * <p>If no {@link #setMirrorMode} is called first, this function returns
1450      * {@link #MIRROR_MODE_AUTO}.</p>
1451      *
1452      * @return The currently set mirroring mode
1453      */
1454     public @MirrorMode int getMirrorMode() {
1455         return mMirrorMode;
1456     }
1457 
1458     /**
1459      * Set the mirroring mode for a surface belonging to this OutputConfiguration
1460      *
1461      * <p>This function is identical to {@link #setMirrorMode(int)} if {@code surface} is
1462      * the only surface belonging to this OutputConfiguration.</p>
1463      *
1464      * <p>If this OutputConfiguration contains a deferred surface, the application can either
1465      * call {@link #setMirrorMode(int)}, or call this function after calling {@link #addSurface}.
1466      * </p>
1467      *
1468      * <p>If this OutputConfiguration contains shared surfaces, the application can set
1469      * different mirroring modes for different surfaces.</p>
1470      *
1471      * <p>For efficiency, the mirror effect is applied as a transform flag, so it is only effective
1472      * in some outputs. It works automatically for SurfaceView and TextureView outputs. For manual
1473      * use of SurfaceTexture, it is reflected in the value of
1474      * {@link android.graphics.SurfaceTexture#getTransformMatrix}. For other end points, such as
1475      * ImageReader, MediaRecorder, or MediaCodec, the mirror mode has no effect. If mirroring is
1476      * needed for such outputs, the application needs to mirror the image buffers itself before
1477      * passing them onward.</p>
1478      *
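     * <p>For example, with surface sharing enabled between two {@link
     * android.graphics.SurfaceTexture} backed surfaces (illustrative names below), each surface
     * can use its own mirroring mode:</p>
     *
     * <pre>{@code
     * OutputConfiguration sharedConfig = new OutputConfiguration(firstSurface);
     * sharedConfig.enableSurfaceSharing();
     * sharedConfig.addSurface(secondSurface);
     * sharedConfig.setMirrorMode(firstSurface, OutputConfiguration.MIRROR_MODE_AUTO);
     * sharedConfig.setMirrorMode(secondSurface, OutputConfiguration.MIRROR_MODE_NONE);
     * }</pre>
     *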
1479      * @throws IllegalArgumentException If the {@code surface} doesn't belong to this
1480      *                                  OutputConfiguration, or the {@code mirrorMode} value is
1481      *                                  not valid.
1482      */
1483     @FlaggedApi(Flags.FLAG_MIRROR_MODE_SHARED_SURFACES)
1484     public void setMirrorMode(@NonNull Surface surface, @MirrorMode int mirrorMode) {
1485         checkNotNull(surface, "Surface must not be null");
1486         // Verify that the value is in range
1487         if (mirrorMode < MIRROR_MODE_AUTO || mirrorMode > MIRROR_MODE_V) {
1488             throw new IllegalArgumentException("Not a valid mirror mode " + mirrorMode);
1489         }
1490         int surfaceIndex = mSurfaces.indexOf(surface);
1491         if (surfaceIndex == -1) {
1492             throw new IllegalArgumentException("Surface not part of the OutputConfiguration");
1493         }
1494 
1495         mMirrorModeForSurfaces.set(surfaceIndex, mirrorMode);
1496     }
1497 
1498     /**
1499      * Get the current mirroring mode for an output surface
1500      *
1501      * <p>If no {@link #setMirrorMode} is called first, this function returns
1502      * {@link #MIRROR_MODE_AUTO}.</p>
1503      *
1504      * <p>If only {@link #setMirrorMode(int)} is called, the mirroring mode set by that
1505      * function will be returned here as long as the {@code surface} belongs to this
1506      * output configuration.</p>
1507      *
1508      * @throws IllegalArgumentException If the {@code surface} doesn't belong to this
1509      *                                  OutputConfiguration.
1510      *
1511      * @return The mirroring mode for the specified output surface
1512      */
1513     @FlaggedApi(Flags.FLAG_MIRROR_MODE_SHARED_SURFACES)
1514     public @MirrorMode int getMirrorMode(@NonNull Surface surface) {
1515         checkNotNull(surface, "Surface must not be null");
1516 
1517         int surfaceIndex = mSurfaces.indexOf(surface);
1518         if (surfaceIndex == -1) {
1519             throw new IllegalArgumentException("Surface not part of the OutputConfiguration");
1520         }
1521         return mMirrorModeForSurfaces.get(surfaceIndex);
1522     }
1523 
1524     /**
1525      * Use the camera sensor's readout time for the image timestamp.
1526      *
1527      * <p>The readout timestamp is the start of the camera sensor readout after exposure. For a
1528      * rolling shutter camera sensor, the timestamp is typically equal to {@code (the start of
1529      * exposure time) + (exposure time) + (certain fixed offset)}. The fixed offset can vary per
1530      * session, depending on the underlying sensor configuration. The benefit of using readout time
1531      * is that when the camera runs at a fixed frame rate, the timestamp intervals between frames are constant.</p>
1532      *
1533      * <p>Readout timestamp is supported only if {@link
1534      * CameraCharacteristics#SENSOR_READOUT_TIMESTAMP} is
1535      * {@link CameraMetadata#SENSOR_READOUT_TIMESTAMP_HARDWARE}.</p>
1536      *
1537      * <p>As long as readout timestamp is supported, if the timestamp base is
1538      * {@link #TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED}, or if the timestamp base is DEFAULT for a
1539      * SurfaceView output, the image timestamps for the output are always readout time regardless
1540      * of whether this function is called.</p>
1541      *
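     * <p>For example, readout timestamps could be requested after checking for hardware support
     * (the {@code characteristics} and {@code recordConfig} names below are illustrative):</p>
     *
     * <pre>{@code
     * Integer readoutSupport = characteristics.get(CameraCharacteristics.SENSOR_READOUT_TIMESTAMP);
     * if (readoutSupport != null
     *         && readoutSupport == CameraMetadata.SENSOR_READOUT_TIMESTAMP_HARDWARE) {
     *     recordConfig.setReadoutTimestampEnabled(true);
     * }
     * }</pre>
     *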
1542      * @param on The output image timestamp is the start of exposure time if false, and
1543      *           the start of readout time if true.
1544      */
1545     public void setReadoutTimestampEnabled(boolean on) {
1546         mReadoutTimestampEnabled = on;
1547     }
1548 
1549     /** Whether readout timestamp is used for this OutputConfiguration.
1550      *
1551      * @see #setReadoutTimestampEnabled
1552      */
1553     public boolean isReadoutTimestampEnabled() {
1554         return mReadoutTimestampEnabled;
1555     }
1556 
1557     /**
1558      * Create a new {@link OutputConfiguration} instance with another {@link OutputConfiguration}
1559      * instance.
1560      *
1561      * @param other Another {@link OutputConfiguration} instance to be copied.
1562      *
1563      * @hide
1564      */
1565     public OutputConfiguration(@NonNull OutputConfiguration other) {
1566         if (other == null) {
1567             throw new IllegalArgumentException("OutputConfiguration shouldn't be null");
1568         }
1569 
1570         this.mSurfaces = other.mSurfaces;
1571         this.mRotation = other.mRotation;
1572         this.mSurfaceGroupId = other.mSurfaceGroupId;
1573         this.mSurfaceType = other.mSurfaceType;
1574         this.mConfiguredDataspace = other.mConfiguredDataspace;
1575         this.mConfiguredFormat = other.mConfiguredFormat;
1576         this.mConfiguredSize = other.mConfiguredSize;
1577         this.mConfiguredGenerationId = other.mConfiguredGenerationId;
1578         this.mIsDeferredConfig = other.mIsDeferredConfig;
1579         this.mIsShared = other.mIsShared;
1580         this.mPhysicalCameraId = other.mPhysicalCameraId;
1581         this.mIsMultiResolution = other.mIsMultiResolution;
1582         this.mSensorPixelModesUsed = other.mSensorPixelModesUsed;
1583         this.mDynamicRangeProfile = other.mDynamicRangeProfile;
1584         this.mColorSpace = other.mColorSpace;
1585         this.mStreamUseCase = other.mStreamUseCase;
1586         this.mTimestampBase = other.mTimestampBase;
1587         this.mMirrorMode = other.mMirrorMode;
1588         this.mMirrorModeForSurfaces = other.mMirrorModeForSurfaces.clone();
1589         this.mReadoutTimestampEnabled = other.mReadoutTimestampEnabled;
1590         this.mUsage = other.mUsage;
1591     }
1592 
1593     /**
1594      * Create an OutputConfiguration from Parcel.
1595      */
1596     private OutputConfiguration(@NonNull Parcel source) {
1597         int rotation = source.readInt();
1598         int surfaceSetId = source.readInt();
1599         int surfaceType = source.readInt();
1600         int width = source.readInt();
1601         int height = source.readInt();
1602         boolean isDeferred = source.readInt() == 1;
1603         boolean isShared = source.readInt() == 1;
1604         ArrayList<Surface> surfaces = new ArrayList<Surface>();
1605         source.readTypedList(surfaces, Surface.CREATOR);
1606         String physicalCameraId = source.readString();
1607         boolean isMultiResolutionOutput = source.readInt() == 1;
1608         int[] sensorPixelModesUsed = source.createIntArray();
1609 
1610         checkArgumentInRange(rotation, ROTATION_0, ROTATION_270, "Rotation constant");
1611         long dynamicRangeProfile = source.readLong();
1612         DynamicRangeProfiles.checkProfileValue(dynamicRangeProfile);
1613         int colorSpace = source.readInt();
1614         long streamUseCase = source.readLong();
1615 
1616         int timestampBase = source.readInt();
1617         int mirrorMode = source.readInt();
1618         int[] mirrorModeForSurfaces = source.createIntArray();
1619         boolean readoutTimestampEnabled = source.readInt() == 1;
1620         int format = source.readInt();
1621         int dataSpace = source.readInt();
1622         long usage = source.readLong();
1623 
1624         mSurfaceGroupId = surfaceSetId;
1625         mRotation = rotation;
1626         mSurfaces = surfaces;
1627         mConfiguredSize = new Size(width, height);
1628         mIsDeferredConfig = isDeferred;
1629         mIsShared = isShared;
1630         mUsage = 0;
1631         if (mSurfaces.size() > 0) {
1632             mSurfaceType = SURFACE_TYPE_UNKNOWN;
1633             mConfiguredFormat = SurfaceUtils.getSurfaceFormat(mSurfaces.get(0));
1634             mConfiguredDataspace = SurfaceUtils.getSurfaceDataspace(mSurfaces.get(0));
1635             mConfiguredGenerationId = mSurfaces.get(0).getGenerationId();
1636         } else {
1637             mSurfaceType = surfaceType;
1638             if (mSurfaceType != SURFACE_TYPE_IMAGE_READER) {
1639                 mConfiguredFormat = StreamConfigurationMap.imageFormatToInternal(
1640                         ImageFormat.PRIVATE);
1641                 mConfiguredDataspace =
1642                         StreamConfigurationMap.imageFormatToDataspace(ImageFormat.PRIVATE);
1643             } else {
1644                 mConfiguredFormat = format;
1645                 mConfiguredDataspace = dataSpace;
1646                 mUsage = usage;
1647             }
1648             mConfiguredGenerationId = 0;
1649         }
1650         mPhysicalCameraId = physicalCameraId;
1651         mIsMultiResolution = isMultiResolutionOutput;
1652         mSensorPixelModesUsed = convertIntArrayToIntegerList(sensorPixelModesUsed);
1653         mDynamicRangeProfile = dynamicRangeProfile;
1654         mColorSpace = colorSpace;
1655         mStreamUseCase = streamUseCase;
1656         mTimestampBase = timestampBase;
1657         mMirrorMode = mirrorMode;
1658         mMirrorModeForSurfaces = IntArray.wrap(mirrorModeForSurfaces);
1659         mReadoutTimestampEnabled = readoutTimestampEnabled;
1660     }
1661 
1662     /**
1663      * Get the maximum supported shared {@link Surface} count.
1664      *
1665      * @return the maximum number of surfaces that can be added per OutputConfiguration.
1666      *
1667      * @see #enableSurfaceSharing
1668      */
1669     public int getMaxSharedSurfaceCount() {
1670         return MAX_SURFACES_COUNT;
1671     }
1672 
1673     /**
1674      * Get the {@link Surface} associated with this {@link OutputConfiguration}.
1675      *
1676      * If more than one surface is associated with this {@link OutputConfiguration}, return the
1677      * first one as specified in the constructor or {@link OutputConfiguration#addSurface}.
1678      */
1679     public @Nullable Surface getSurface() {
1680         if (mSurfaces.size() == 0) {
1681             return null;
1682         }
1683 
1684         return mSurfaces.get(0);
1685     }
1686 
1687     /**
1688      * Get the immutable list of surfaces associated with this {@link OutputConfiguration}.
1689      *
1690      * @return the list of surfaces associated with this {@link OutputConfiguration} as specified in
1691      * the constructor and {@link OutputConfiguration#addSurface}. The list should not be modified.
1692      */
1693     @NonNull
1694     public List<Surface> getSurfaces() {
1695         return Collections.unmodifiableList(mSurfaces);
1696     }
1697 
1698     /**
1699      * Get the rotation associated with this {@link OutputConfiguration}.
1700      *
1701      * @return the rotation associated with this {@link OutputConfiguration}.
1702      *         Value will be one of ROTATION_[0, 90, 180, 270]
1703      *
1704      * @hide
1705      */
1706     @SystemApi
1707     public int getRotation() {
1708         return mRotation;
1709     }
1710 
1711     /**
1712      * Get the surface group ID associated with this {@link OutputConfiguration}.
1713      *
1714      * @return the surface group ID associated with this {@link OutputConfiguration}.
1715      *         The default value is {@value #SURFACE_GROUP_ID_NONE}.
1716      */
1717     public int getSurfaceGroupId() {
1718         return mSurfaceGroupId;
1719     }
1720 
1721     /**
1722      * Get the configured size associated with this {@link OutputConfiguration}.
1723      *
1724      * @return The configured size associated with this {@link OutputConfiguration}.
1725      *
1726      * @hide
1727      */
1728     public Size getConfiguredSize() {
1729         return mConfiguredSize;
1730     }
1731 
1732     /**
1733      * Get the configured format associated with this {@link OutputConfiguration}.
1734      *
1735      * @return {@link android.graphics.ImageFormat#Format} associated with this
1736      *         {@link OutputConfiguration}.
1737      *
1738      * @hide
1739      */
1740     public @Format int getConfiguredFormat() {
1741         return mConfiguredFormat;
1742     }
1743 
1744     /**
1745      * Get the usage flag associated with this {@link OutputConfiguration}.
1746      *
1747      * @return {@link HardwareBuffer#Usage} associated with this {@link OutputConfiguration}.
1748      *
1749      * @hide
1750      */
1751     public @Usage long getUsage() {
1752         return mUsage;
1753     }
1754 
1755     /**
1756      * Get the surface type associated with this {@link OutputConfiguration}.
1757      *
1758      * @return The surface type associated with this {@link OutputConfiguration}.
1759      *
1760      * @see #SURFACE_TYPE_SURFACE_VIEW
1761      * @see #SURFACE_TYPE_SURFACE_TEXTURE
1762      * @see #SURFACE_TYPE_MEDIA_RECORDER
1763      * @see #SURFACE_TYPE_MEDIA_CODEC
1764      * @see #SURFACE_TYPE_IMAGE_READER
1765      * @see #SURFACE_TYPE_UNKNOWN
1766      * @hide
1767      */
1768     public int getSurfaceType() {
1769         return mSurfaceType;
1770     }
1771 
1772     /**
1773      * Get the sensor pixel modes associated with this {@link OutputConfiguration}.
1774      *
1775      * @return List of {@link #SensorPixelMode} associated with this {@link OutputConfiguration}.
1776      *
1777      * @hide
1778      */
1779     public @NonNull List<Integer> getSensorPixelModes() {
1780         return mSensorPixelModesUsed;
1781     }
1782 
1783     /**
1784      * Get the sharing mode associated with this {@link OutputConfiguration}.
1785      *
1786      * @return true if surface sharing is enabled with this {@link OutputConfiguration}.
1787      *
1788      * @hide
1789      */
1790     public boolean isShared() {
1791         return mIsShared;
1792     }
1793 
1794     /**
1795      * Get the dataspace associated with this {@link OutputConfiguration}.
1796      *
1797      * @return the {@link android.hardware.DataSpace.NamedDataSpace} for this {@link OutputConfiguration}.
1798      *
1799      * @hide
1800      */
1801     public @NamedDataSpace int getConfiguredDataspace() {
1802         return mConfiguredDataspace;
1803     }
1804 
1805     /**
1806      * Get the flag indicating if this {@link OutputConfiguration} is for a multi-resolution output
1807      * with a MultiResolutionImageReader.
1808      *
1809      * @return true if this {@link OutputConfiguration} is for a multi-resolution output with a
1810      *              MultiResolutionImageReader.
1811      *
1812      * @hide
1813      */
1814     public boolean isMultiResolution() {
1815         return mIsMultiResolution;
1816     }
1817 
1818     /**
1819      * Get the physical camera ID associated with this {@link OutputConfiguration}.
1820      *
1821      * <p>If this OutputConfiguration isn't targeting a physical camera of a logical
1822      * multi-camera, this function returns {@code null}.</p>
1823      *
1824      * @return The physical camera Id associated with this {@link OutputConfiguration}.
1825      *
1826      * @hide
1827      */
1828     public @Nullable String getPhysicalCameraId() {
1829         return mPhysicalCameraId;
1830     }
1831 
1832     public static final @android.annotation.NonNull Parcelable.Creator<OutputConfiguration> CREATOR =
1833             new Parcelable.Creator<OutputConfiguration>() {
1834         @Override
1835         public OutputConfiguration createFromParcel(Parcel source) {
1836             return new OutputConfiguration(source);
1837         }
1838 
1839         @Override
1840         public OutputConfiguration[] newArray(int size) {
1841             return new OutputConfiguration[size];
1842         }
1843     };
1844 
1845     @Override
1846     public int describeContents() {
1847         return 0;
1848     }
1849 
1850     private static int[] convertIntegerToIntList(List<Integer> integerList) {
1851         int[] integerArray = new int[integerList.size()];
1852         for (int i = 0; i < integerList.size(); i++) {
1853             integerArray[i] = integerList.get(i);
1854         }
1855         return integerArray;
1856     }
1857 
1858     private static ArrayList<Integer> convertIntArrayToIntegerList(int[] intArray) {
1859         ArrayList<Integer> integerList = new ArrayList<Integer>();
1860         if (intArray == null) {
1861             return integerList;
1862         }
1863         for (int i = 0; i < intArray.length; i++) {
1864             integerList.add(intArray[i]);
1865         }
1866         return integerList;
1867     }
1868 
1869     @Override
1870     public void writeToParcel(Parcel dest, int flags) {
1871         if (dest == null) {
1872             throw new IllegalArgumentException("dest must not be null");
1873         }
1874         dest.writeInt(mRotation);
1875         dest.writeInt(mSurfaceGroupId);
1876         dest.writeInt(mSurfaceType);
1877         dest.writeInt(mConfiguredSize.getWidth());
1878         dest.writeInt(mConfiguredSize.getHeight());
1879         dest.writeInt(mIsDeferredConfig ? 1 : 0);
1880         dest.writeInt(mIsShared ? 1 : 0);
1881         dest.writeTypedList(mSurfaces);
1882         dest.writeString(mPhysicalCameraId);
1883         dest.writeInt(mIsMultiResolution ? 1 : 0);
1884         // writeList doesn't seem to work well with Integer list.
1885         dest.writeIntArray(convertIntegerToIntList(mSensorPixelModesUsed));
1886         dest.writeLong(mDynamicRangeProfile);
1887         dest.writeInt(mColorSpace);
1888         dest.writeLong(mStreamUseCase);
1889         dest.writeInt(mTimestampBase);
1890         dest.writeInt(mMirrorMode);
1891         dest.writeIntArray(mMirrorModeForSurfaces.toArray());
1892         dest.writeInt(mReadoutTimestampEnabled ? 1 : 0);
1893         dest.writeInt(mConfiguredFormat);
1894         dest.writeInt(mConfiguredDataspace);
1895         dest.writeLong(mUsage);
1896     }
1897 
1898     /**
1899      * Check if this {@link OutputConfiguration} is equal to another {@link OutputConfiguration}.
1900      *
1901      * <p>Two output configurations are equal if and only if the underlying surfaces, the surface
1902      * properties (width, height, format, dataspace) at the time the output configurations were created,
1903      * and all other configuration parameters are equal. </p>
1904      *
1905      * @return {@code true} if the objects are equal, {@code false} otherwise
1906      */
1907     @Override
1908     public boolean equals(@Nullable Object obj) {
1909         if (obj == null) {
1910             return false;
1911         } else if (this == obj) {
1912             return true;
1913         } else if (obj instanceof OutputConfiguration) {
1914             final OutputConfiguration other = (OutputConfiguration) obj;
1915             if (mRotation != other.mRotation
1916                     || !mConfiguredSize.equals(other.mConfiguredSize)
1917                     || mConfiguredFormat != other.mConfiguredFormat
1918                     || mSurfaceGroupId != other.mSurfaceGroupId
1919                     || mSurfaceType != other.mSurfaceType
1920                     || mIsDeferredConfig != other.mIsDeferredConfig
1921                     || mIsShared != other.mIsShared
1922                     || mConfiguredDataspace != other.mConfiguredDataspace
1923                     || mConfiguredGenerationId != other.mConfiguredGenerationId
1924                     || !Objects.equals(mPhysicalCameraId, other.mPhysicalCameraId)
1925                     || mIsMultiResolution != other.mIsMultiResolution
1926                     || mStreamUseCase != other.mStreamUseCase
1927                     || mTimestampBase != other.mTimestampBase
1928                     || mMirrorMode != other.mMirrorMode
1929                     || mReadoutTimestampEnabled != other.mReadoutTimestampEnabled
1930                     || mUsage != other.mUsage) {
1931                 return false;
1932             }
1933             if (mSensorPixelModesUsed.size() != other.mSensorPixelModesUsed.size()) {
1934                 return false;
1935             }
1936             for (int j = 0; j < mSensorPixelModesUsed.size(); j++) {
1937                 if (!Objects.equals(
1938                         mSensorPixelModesUsed.get(j), other.mSensorPixelModesUsed.get(j))) {
1939                     return false;
1940                 }
1941             }
1942             if (Flags.mirrorModeSharedSurfaces()) {
1943                 if (mMirrorModeForSurfaces.size() != other.mMirrorModeForSurfaces.size()) {
1944                     return false;
1945                 }
1946                 for (int j = 0; j < mMirrorModeForSurfaces.size(); j++) {
1947                     if (mMirrorModeForSurfaces.get(j) != other.mMirrorModeForSurfaces.get(j)) {
1948                         return false;
1949                     }
1950                 }
1951             }
1952             int minLen = Math.min(mSurfaces.size(), other.mSurfaces.size());
1953             for (int i = 0;  i < minLen; i++) {
1954                 if (mSurfaces.get(i) != other.mSurfaces.get(i))
1955                     return false;
1956             }
1957             if (!mIsDeferredConfig && mSurfaces.size() != other.mSurfaces.size()) return false;
1958             if (mDynamicRangeProfile != other.mDynamicRangeProfile) {
1959                 return false;
1960             }
1961             if (mColorSpace != other.mColorSpace) {
1962                 return false;
1963             }
1964 
1965             return true;
1966         }
1967         return false;
1968     }
1969 
1970     /**
1971      * Get and increase the next MultiResolution group id.
1972      *
1973      * If the next ID would equal {@code SURFACE_GROUP_ID_NONE} (-1), it is skipped.
1974      */
1975     private static int getAndIncreaseMultiResolutionGroupId() {
1976         return sNextMultiResolutionGroupId.getAndUpdate(i ->
1977                 i + 1 == SURFACE_GROUP_ID_NONE ? i + 2 : i + 1);
1978     }
1979 
1980     /**
1981      * {@inheritDoc}
1982      */
1983     @Override
1984     public int hashCode() {
1985         // Need to ensure that the hash code remains unchanged after adding a deferred surface.
1986         // Otherwise the deferred output configuration will be lost in the camera stream map
1987         // after the deferred surface is set.
1988         if (mIsDeferredConfig) {
1989             return HashCodeHelpers.hashCode(
1990                     mRotation, mConfiguredSize.hashCode(), mConfiguredFormat, mConfiguredDataspace,
1991                     mSurfaceGroupId, mSurfaceType, mIsShared ? 1 : 0,
1992                     mPhysicalCameraId == null ? 0 : mPhysicalCameraId.hashCode(),
1993                     mIsMultiResolution ? 1 : 0, mSensorPixelModesUsed.hashCode(),
1994                     mDynamicRangeProfile, mColorSpace, mStreamUseCase,
1995                     mTimestampBase, mMirrorMode,
1996                     HashCodeHelpers.hashCode(mMirrorModeForSurfaces.toArray()),
1997                     mReadoutTimestampEnabled ? 1 : 0, Long.hashCode(mUsage));
1998         }
1999 
2000         return HashCodeHelpers.hashCode(
2001                 mRotation, mSurfaces.hashCode(), mConfiguredGenerationId,
2002                 mConfiguredSize.hashCode(), mConfiguredFormat,
2003                 mConfiguredDataspace, mSurfaceGroupId, mIsShared ? 1 : 0,
2004                 mPhysicalCameraId == null ? 0 : mPhysicalCameraId.hashCode(),
2005                 mIsMultiResolution ? 1 : 0, mSensorPixelModesUsed.hashCode(),
2006                 mDynamicRangeProfile, mColorSpace, mStreamUseCase, mTimestampBase,
2007                 mMirrorMode, HashCodeHelpers.hashCode(mMirrorModeForSurfaces.toArray()),
2008                 mReadoutTimestampEnabled ? 1 : 0,
2009                 Long.hashCode(mUsage));
2010     }
2011 
2012     private static final String TAG = "OutputConfiguration";
2013 
2014     // A surfaceGroupId counter used for MultiResolutionImageReader. Its value is
2015     // incremented every time {@link createInstancesForMultiResolutionOutput} is called.
2016     private static AtomicInteger sNextMultiResolutionGroupId = new AtomicInteger(0);
2017 
2018     private ArrayList<Surface> mSurfaces;
2019     private final int mRotation;
2020     private final int mSurfaceGroupId;
2021     // Surface source type, this is only used by the deferred surface configuration objects.
2022     private final int mSurfaceType;
2023 
2024     // The size, format, and dataspace of the surface when OutputConfiguration is created.
2025     private final Size mConfiguredSize;
2026     private final int mConfiguredFormat;
2027     private final int mConfiguredDataspace;
2028     // Surface generation ID to distinguish changes to Surface native internals
2029     private final int mConfiguredGenerationId;
2030     // Flag indicating if this config has deferred surface.
2031     private final boolean mIsDeferredConfig;
2032     // Flag indicating if this config has shared surfaces
2033     private boolean mIsShared;
2034     // The physical camera id that this output configuration is for.
2035     private String mPhysicalCameraId;
2036     // Flag indicating if this config is for a multi-resolution output with a
2037     // MultiResolutionImageReader
2038     private boolean mIsMultiResolution;
2039     // The sensor pixel modes that this OutputConfiguration will use
2040     private ArrayList<Integer> mSensorPixelModesUsed;
2041     // Dynamic range profile
2042     private long mDynamicRangeProfile;
2043     // Color space
2044     private int mColorSpace;
2045     // Stream use case
2046     private long mStreamUseCase;
2047     // Timestamp base
2048     private int mTimestampBase;
2049     // Mirroring mode
2050     private int mMirrorMode;
2051     // Per-surface mirror modes
2052     private IntArray mMirrorModeForSurfaces;
2053     // readout timestamp
2054     private boolean mReadoutTimestampEnabled;
2055     // Whether the timestamp base is set to READOUT_SENSOR
2056     private boolean mIsReadoutSensorTimestampBase;
2057     // The usage flags. Only set for instances created for ImageReader without specifying surface.
2058     private long mUsage;
2059 }
2060