• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2015 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 
18 package android.hardware.camera2.params;
19 
20 import static com.android.internal.util.Preconditions.*;
21 
22 import android.annotation.IntDef;
23 import android.annotation.NonNull;
24 import android.annotation.Nullable;
25 import android.annotation.SystemApi;
26 import android.graphics.ImageFormat;
27 import android.hardware.camera2.CameraCaptureSession;
28 import android.hardware.camera2.CameraCharacteristics;
29 import android.hardware.camera2.CameraDevice;
30 import android.hardware.camera2.CameraMetadata;
31 import android.hardware.camera2.MultiResolutionImageReader;
32 import android.hardware.camera2.params.DynamicRangeProfiles;
33 import android.hardware.camera2.params.DynamicRangeProfiles.Profile;
34 import android.hardware.camera2.params.MultiResolutionStreamInfo;
35 import android.hardware.camera2.utils.HashCodeHelpers;
36 import android.hardware.camera2.utils.SurfaceUtils;
37 import android.media.ImageReader;
38 import android.os.Parcel;
39 import android.os.Parcelable;
40 import android.util.Log;
41 import android.util.Size;
42 import android.view.Surface;
43 
44 import java.lang.annotation.Retention;
45 import java.lang.annotation.RetentionPolicy;
46 import java.util.ArrayList;
47 import java.util.Collection;
48 import java.util.Collections;
49 import java.util.List;
50 import java.util.Objects;
51 
52 /**
53  * A class for describing camera output, which contains a {@link Surface} and its specific
54  * configuration for creating capture session.
55  *
56  * <p>There are several ways to instantiate, modify and use OutputConfigurations. The most common
57  * and recommended usage patterns are summarized in the following list:</p>
58  *<ul>
59  * <li>Passing a {@link Surface} to the constructor and using the OutputConfiguration instance as
60  * argument to {@link CameraDevice#createCaptureSessionByOutputConfigurations}. This is the most
61  * frequent usage and clients should consider it first before other more complicated alternatives.
62  * </li>
63  *
64  * <li>Passing only a surface source class as an argument to the constructor. This is usually
65  * followed by a call to create a capture session
 * (see {@link CameraDevice#createCaptureSessionByOutputConfigurations}) and a subsequent
 * call to {@link #addSurface} with a valid {@link Surface}. The sequence completes with
68  * {@link CameraCaptureSession#finalizeOutputConfigurations}. This is the deferred usage case which
69  * aims to enhance performance by allowing the resource-intensive capture session create call to
70  * execute in parallel with any {@link Surface} initialization, such as waiting for a
71  * {@link android.view.SurfaceView} to be ready as part of the UI initialization.</li>
72  *
73  * <li>The third and most complex usage pattern involves surface sharing. Once instantiated an
74  * OutputConfiguration can be enabled for surface sharing via {@link #enableSurfaceSharing}. This
75  * must be done before creating a new capture session and enables calls to
76  * {@link CameraCaptureSession#updateOutputConfiguration}. An OutputConfiguration with enabled
77  * surface sharing can be modified via {@link #addSurface} or {@link #removeSurface}. The updates
78  * to this OutputConfiguration will only come into effect after
79  * {@link CameraCaptureSession#updateOutputConfiguration} returns without throwing exceptions.
80  * Such updates can be done as long as the session is active. Clients should always consider the
81  * additional requirements and limitations placed on the output surfaces (for more details see
82  * {@link #enableSurfaceSharing}, {@link #addSurface}, {@link #removeSurface},
83  * {@link CameraCaptureSession#updateOutputConfiguration}). A trade-off exists between additional
84  * complexity and flexibility. If exercised correctly surface sharing can switch between different
85  * output surfaces without interrupting any ongoing repeating capture requests. This saves time and
86  * can significantly improve the user experience.</li>
87  *
88  * <li>Surface sharing can be used in combination with deferred surfaces. The rules from both cases
89  * are combined and clients must call {@link #enableSurfaceSharing} before creating a capture
90  * session. Attach and/or remove output surfaces via  {@link #addSurface}/{@link #removeSurface} and
91  * finalize the configuration using {@link CameraCaptureSession#finalizeOutputConfigurations}.
92  * {@link CameraCaptureSession#updateOutputConfiguration} can be called after the configuration
93  * finalize method returns without exceptions.</li>
94  *
95  * <li>If the camera device supports multi-resolution output streams, {@link
96  * CameraCharacteristics#SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP} will contain the
97  * formats and their corresponding stream info. The application can use an OutputConfiguration
98  * created with the multi-resolution stream info queried from {@link
99  * MultiResolutionStreamConfigurationMap#getOutputInfo} and
 * {@link android.hardware.camera2.MultiResolutionImageReader} to capture variable size
 * images.</li>
 *
 * </ul>
103  *
104  * <p> As of {@link android.os.Build.VERSION_CODES#P Android P}, all formats except
105  * {@link ImageFormat#JPEG} and {@link ImageFormat#RAW_PRIVATE} can be used for sharing, subject to
106  * device support. On prior API levels, only {@link ImageFormat#PRIVATE} format may be used.</p>
107  *
108  * @see CameraDevice#createCaptureSessionByOutputConfigurations
109  * @see CameraCharacteristics#SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP
110  *
111  */
112 public final class OutputConfiguration implements Parcelable {
113 
    /**
     * Rotation constant: 0 degree rotation (no rotation).
     *
     * @hide
     */
    @SystemApi
    public static final int ROTATION_0 = 0;

    /**
     * Rotation constant: 90 degree counterclockwise rotation.
     *
     * @hide
     */
    @SystemApi
    public static final int ROTATION_90 = 1;

    /**
     * Rotation constant: 180 degree counterclockwise rotation.
     *
     * @hide
     */
    @SystemApi
    public static final int ROTATION_180 = 2;

    /**
     * Rotation constant: 270 degree counterclockwise rotation.
     *
     * @hide
     */
    @SystemApi
    public static final int ROTATION_270 = 3;

    /**
     * Invalid surface group ID.
     *
     * <p>An {@link OutputConfiguration} with this value indicates that the included surface
     * doesn't belong to any surface group.</p>
     */
    public static final int SURFACE_GROUP_ID_NONE = -1;
153 
    /**
     * Default timestamp base.
     *
     * <p>The camera device decides the timestamp based on the properties of the
     * output surface.</p>
     *
     * <ul>
     * <li> For a SurfaceView output surface, the timestamp base is {@link
     * #TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED}. The timestamp is overridden with choreographer
     * pulses from the display subsystem for smoother display of camera frames when the camera
     * device runs in fixed frame rate. The timestamp is roughly in the same time base as
     * {@link android.os.SystemClock#uptimeMillis}.</li>
     * <li> For an output surface of MediaRecorder, MediaCodec, or ImageReader with {@link
     * android.hardware.HardwareBuffer#USAGE_VIDEO_ENCODE} usage flag, the timestamp base is
     * {@link #TIMESTAMP_BASE_MONOTONIC}, which is roughly the same time base as
     * {@link android.os.SystemClock#uptimeMillis}.</li>
     * <li> For all other cases, the timestamp base is {@link #TIMESTAMP_BASE_SENSOR}, the same
     * as what's specified by {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE}.
     * <ul><li> For a SurfaceTexture output surface, the camera system re-spaces the delivery
     * of output frames based on image readout intervals, reducing viewfinder jitter. The timestamps
     * of images remain to be {@link #TIMESTAMP_BASE_SENSOR}.</li></ul></li>
     * </ul>
     *
     * <p>Note that the reduction of frame jitter for SurfaceView and SurfaceTexture comes with
     * slight increase in photon-to-photon latency, which is the time from when photons hit the
     * scene to when the corresponding pixels show up on the screen. If the photon-to-photon latency
     * is more important than the smoothness of viewfinder, {@link #TIMESTAMP_BASE_SENSOR} should be
     * used instead.</p>
     *
     * @see #TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED
     * @see #TIMESTAMP_BASE_MONOTONIC
     * @see #TIMESTAMP_BASE_SENSOR
     */
    public static final int TIMESTAMP_BASE_DEFAULT = 0;

    /**
     * Timestamp base of {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE}.
     *
     * <p>The timestamps of the output images are in the time base as specified by {@link
     * CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE}. The application can look up the
     * corresponding result metadata for a particular output image using this timestamp.</p>
     */
    public static final int TIMESTAMP_BASE_SENSOR = 1;

    /**
     * Timestamp base roughly the same as {@link android.os.SystemClock#uptimeMillis}.
     *
     * <p>The timestamps of the output images are monotonically increasing, and are roughly in the
     * same time base as {@link android.os.SystemClock#uptimeMillis}. The timestamps with this
     * time base can be directly used for audio-video sync in video recording.</p>
     *
     * <p>If the camera device's {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE} is
     * REALTIME, timestamps with this time base cannot directly match the timestamps in
     * {@link CameraCaptureSession.CaptureCallback#onCaptureStarted} or the sensor timestamps in
     * {@link android.hardware.camera2.CaptureResult}.</p>
     */
    public static final int TIMESTAMP_BASE_MONOTONIC = 2;

    /**
     * Timestamp base roughly the same as {@link android.os.SystemClock#elapsedRealtime}.
     *
     * <p>The timestamps of the output images are roughly in the
     * same time base as {@link android.os.SystemClock#elapsedRealtime}. The timestamps with this
     * time base cannot be directly used for audio-video sync in video recording.</p>
     *
     * <p>If the camera device's {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE} is
     * UNKNOWN, timestamps with this time base cannot directly match the timestamps in
     * {@link CameraCaptureSession.CaptureCallback#onCaptureStarted} or the sensor timestamps in
     * {@link android.hardware.camera2.CaptureResult}.</p>
     *
     * <p>If using a REALTIME timestamp base on a device that supports only
     * TIMESTAMP_SOURCE_UNKNOWN, the accuracy of timestamps is only what is guaranteed in the
     * documentation for UNKNOWN. In particular, they have no guarantees about being accurate
     * enough to use in fusing image data with the output of inertial sensors, for features such as
     * image stabilization or augmented reality.</p>
     */
    public static final int TIMESTAMP_BASE_REALTIME = 3;

    /**
     * Timestamp is synchronized to choreographer.
     *
     * <p>The timestamp of the output images are overridden with choreographer pulses from the
     * display subsystem for smoother display of camera frames. An output target of SurfaceView
     * uses this time base by default. Note that the timestamp override is done for fixed camera
     * frame rate only.</p>
     *
     * <p>This timestamp base isn't applicable to SurfaceTexture targets. SurfaceTexture's
     * {@link android.graphics.SurfaceTexture#updateTexImage updateTexImage} function always
     * uses the latest image from the camera stream. In the case of a TextureView, the image is
     * displayed right away.</p>
     *
     * <p>Timestamps with this time base cannot directly match the timestamps in
     * {@link CameraCaptureSession.CaptureCallback#onCaptureStarted} or the sensor timestamps in
     * {@link android.hardware.camera2.CaptureResult}. This timestamp base shouldn't be used if the
     * timestamp needs to be used for audio-video synchronization.</p>
     */
    public static final int TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED = 4;

    /**
     * Timestamp is the start of readout in the same time domain as TIMESTAMP_BASE_SENSOR.
     *
     * <p>The start of the camera sensor readout after exposure. For a rolling shutter camera
     * sensor, the timestamp is typically equal to the start of exposure time +
     * exposure time + certain fixed offset. The fixed offset could be due to camera sensor
     * level crop. The benefit of using readout time is that when camera runs in a fixed
     * frame rate, the timestamp intervals between frames are constant.</p>
     *
     * <p>This timestamp is in the same time domain as in TIMESTAMP_BASE_SENSOR, with the exception
     * that one is start of exposure, and the other is start of readout.</p>
     *
     * <p>This timestamp base is supported only if {@link
     * CameraCharacteristics#SENSOR_READOUT_TIMESTAMP} is
     * {@link CameraMetadata#SENSOR_READOUT_TIMESTAMP_HARDWARE}.</p>
     *
     * @hide
     */
    public static final int TIMESTAMP_BASE_READOUT_SENSOR = 5;

    /** @hide */
    @Retention(RetentionPolicy.SOURCE)
    @IntDef(prefix = {"TIMESTAMP_BASE_"}, value =
        {TIMESTAMP_BASE_DEFAULT,
         TIMESTAMP_BASE_SENSOR,
         TIMESTAMP_BASE_MONOTONIC,
         TIMESTAMP_BASE_REALTIME,
         TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED,
         TIMESTAMP_BASE_READOUT_SENSOR})
    public @interface TimestampBase {};
280 
    /** @hide */
    @Retention(RetentionPolicy.SOURCE)
    @IntDef(prefix = {"SENSOR_PIXEL_MODE_"}, value =
        {CameraMetadata.SENSOR_PIXEL_MODE_DEFAULT,
         CameraMetadata.SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION})
    public @interface SensorPixelMode {};

    /** @hide */
    @Retention(RetentionPolicy.SOURCE)
    @IntDef(prefix = {"STREAM_USE_CASE_"}, value =
        {CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
         CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
         CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
         CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
         CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
         CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL})
    public @interface StreamUseCase {};
298 
    /**
     * Automatic mirroring based on camera facing.
     *
     * <p>This is the default mirroring mode for the camera device. With this mode,
     * the camera output is mirrored horizontally for front-facing cameras. There is
     * no mirroring for rear-facing and external cameras.</p>
     */
    public static final int MIRROR_MODE_AUTO = 0;

    /**
     * No mirror transform is applied.
     *
     * <p>No mirroring is applied to the camera output regardless of the camera facing.</p>
     */
    public static final int MIRROR_MODE_NONE = 1;

    /**
     * Camera output is mirrored horizontally.
     *
     * <p>The camera output is mirrored horizontally, the same behavior as in AUTO mode for
     * front facing camera.</p>
     */
    public static final int MIRROR_MODE_H = 2;

    /**
     * Camera output is mirrored vertically.
     */
    public static final int MIRROR_MODE_V = 3;

    /** @hide */
    @Retention(RetentionPolicy.SOURCE)
    @IntDef(prefix = {"MIRROR_MODE_"}, value =
        {MIRROR_MODE_AUTO,
         MIRROR_MODE_NONE,
         MIRROR_MODE_H,
         MIRROR_MODE_V})
    public @interface MirrorMode {};
336 
    /**
     * Create a new {@link OutputConfiguration} instance with a {@link Surface}.
     *
     * <p>This constructor creates a default configuration, with a surface group ID of
     * {@value #SURFACE_GROUP_ID_NONE} and no rotation.</p>
     *
     * @param surface
     *          A Surface for camera to output to.
     */
    public OutputConfiguration(@NonNull Surface surface) {
        // Delegate to the full constructor with the defaults: no group, no rotation.
        this(SURFACE_GROUP_ID_NONE, surface, ROTATION_0);
    }
350 
351     /**
352      * Unknown surface source type.
353      */
354     private final int SURFACE_TYPE_UNKNOWN = -1;
355 
356     /**
357      * The surface is obtained from {@link android.view.SurfaceView}.
358      */
359     private final int SURFACE_TYPE_SURFACE_VIEW = 0;
360 
361     /**
362      * The surface is obtained from {@link android.graphics.SurfaceTexture}.
363      */
364     private final int SURFACE_TYPE_SURFACE_TEXTURE = 1;
365 
366     /**
367      * Maximum number of surfaces supported by one {@link OutputConfiguration}.
368      *
369      * <p>The combined number of surfaces added by the constructor and
370      * {@link OutputConfiguration#addSurface} should not exceed this value.</p>
371      *
372      */
373     private static final int MAX_SURFACES_COUNT = 4;
374 
    /**
     * Create a new {@link OutputConfiguration} instance with a {@link Surface},
     * with a surface group ID.
     *
     * <p>
     * A surface group ID is used to identify which surface group this output surface belongs to. A
     * surface group is a group of output surfaces that are not intended to receive camera output
     * buffer streams simultaneously. The {@link CameraDevice} may be able to share the buffers used
     * by all the surfaces from the same surface group, therefore may reduce the overall memory
     * footprint. The application should only set the same set ID for the streams that are not
     * simultaneously streaming. A negative ID indicates that this surface doesn't belong to any
     * surface group. The default value is {@value #SURFACE_GROUP_ID_NONE}.</p>
     *
     * <p>For example, a video chat application that has an adaptive output resolution feature would
     * need two (or more) output resolutions, to switch resolutions without any output glitches.
     * However, at any given time, only one output is active to minimize outgoing network bandwidth
     * and encoding overhead.  To save memory, the application should set the video outputs to have
     * the same non-negative group ID, so that the camera device can share the same memory region
     * for the alternating outputs.</p>
     *
     * <p>It is not an error to include output streams with the same group ID in the same capture
     * request, but the resulting memory consumption may be higher than if the two streams were
     * not in the same surface group to begin with, especially if the outputs have substantially
     * different dimensions.</p>
     *
     * @param surfaceGroupId
     *          A group ID for this output, used for sharing memory between multiple outputs.
     * @param surface
     *          A Surface for camera to output to.
     */
    public OutputConfiguration(int surfaceGroupId, @NonNull Surface surface) {
        // Delegate to the full constructor, defaulting to no rotation.
        this(surfaceGroupId, surface, ROTATION_0);
    }
409 
410     /**
411      * Set the multi-resolution output flag.
412      *
413      * <p>Specify that this OutputConfiguration is part of a multi-resolution output stream group
414      * used by {@link android.hardware.camera2.MultiResolutionImageReader}.</p>
415      *
416      * <p>This function must only be called for an OutputConfiguration with a non-negative
417      * group ID. And all OutputConfigurations of a MultiResolutionImageReader will have the same
418      * group ID and have this flag set.</p>
419      *
420      * @throws IllegalStateException If surface sharing is enabled via {@link #enableSurfaceSharing}
421      *         call, or no non-negative group ID has been set.
422      * @hide
423      */
setMultiResolutionOutput()424     public void setMultiResolutionOutput() {
425         if (mIsShared) {
426             throw new IllegalStateException("Multi-resolution output flag must not be set for " +
427                     "configuration with surface sharing");
428         }
429         if (mSurfaceGroupId == SURFACE_GROUP_ID_NONE) {
430             throw new IllegalStateException("Multi-resolution output flag should only be set for " +
431                     "surface with non-negative group ID");
432         }
433 
434         mIsMultiResolution = true;
435     }
436 
437     /**
438      * Set a specific device supported dynamic range profile.
439      *
440      * <p>Clients can choose from any profile advertised as supported in
441      * CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES
442      * queried using {@link DynamicRangeProfiles#getSupportedProfiles()}.
443      * If this is not explicitly set, then the default profile will be
444      * {@link DynamicRangeProfiles#STANDARD}.</p>
445      *
446      * <p>Do note that invalid combinations between the registered output
447      * surface pixel format and the configured dynamic range profile will
448      * cause capture session initialization failure. Invalid combinations
449      * include any 10-bit dynamic range profile advertised in
450      * {@link DynamicRangeProfiles#getSupportedProfiles()} combined with
451      * an output Surface pixel format different from {@link ImageFormat#PRIVATE}
452      * (the default for Surfaces initialized by {@link android.view.SurfaceView},
453      * {@link android.view.TextureView}, {@link android.media.MediaRecorder},
454      * {@link android.media.MediaCodec} etc.)
455      * or {@link ImageFormat#YCBCR_P010}.</p>
456      */
setDynamicRangeProfile(@rofile long profile)457     public void setDynamicRangeProfile(@Profile long profile) {
458         mDynamicRangeProfile = profile;
459     }
460 
461     /**
462      * Return current dynamic range profile.
463      *
464      * @return the currently set dynamic range profile
465      */
getDynamicRangeProfile()466     public @Profile long getDynamicRangeProfile() {
467         return mDynamicRangeProfile;
468     }
469 
    /**
     * Create a new {@link OutputConfiguration} instance.
     *
     * <p>This constructor takes an argument for desired camera rotation.</p>
     *
     * @param surface
     *          A Surface for camera to output to.
     * @param rotation
     *          The desired rotation to be applied on camera output. Value must be one of
     *          ROTATION_[0, 90, 180, 270]. Note that when the rotation is 90 or 270 degrees,
     *          application should make sure corresponding surface size has width and height
     *          transposed relative to the width and height without rotation. For example,
     *          if application needs camera to capture 1280x720 picture and rotate it by 90 degree,
     *          application should set rotation to {@code ROTATION_90} and make sure the
     *          corresponding Surface size is 720x1280. Note that {@link CameraDevice} might
     *          throw {@code IllegalArgumentException} if device cannot perform such rotation.
     * @hide
     */
    @SystemApi
    public OutputConfiguration(@NonNull Surface surface, int rotation) {
        // Delegate to the full constructor with no surface group.
        this(SURFACE_GROUP_ID_NONE, surface, rotation);
    }
492 
    /**
     * Create a new {@link OutputConfiguration} instance, with rotation and a group ID.
     *
     * <p>This constructor takes an argument for desired camera rotation and for the surface group
     * ID.  See {@link #OutputConfiguration(int, Surface)} for details of the group ID.</p>
     *
     * @param surfaceGroupId
     *          A group ID for this output, used for sharing memory between multiple outputs.
     * @param surface
     *          A Surface for camera to output to.
     * @param rotation
     *          The desired rotation to be applied on camera output. Value must be one of
     *          ROTATION_[0, 90, 180, 270]. Note that when the rotation is 90 or 270 degrees,
     *          application should make sure corresponding surface size has width and height
     *          transposed relative to the width and height without rotation. For example,
     *          if application needs camera to capture 1280x720 picture and rotate it by 90 degree,
     *          application should set rotation to {@code ROTATION_90} and make sure the
     *          corresponding Surface size is 720x1280. Note that {@link CameraDevice} might
     *          throw {@code IllegalArgumentException} if device cannot perform such rotation.
     * @throws NullPointerException if {@code surface} is null
     * @throws IllegalArgumentException if {@code rotation} is not one of the ROTATION_* constants
     * @hide
     */
    @SystemApi
    public OutputConfiguration(int surfaceGroupId, @NonNull Surface surface, int rotation) {
        checkNotNull(surface, "Surface must not be null");
        checkArgumentInRange(rotation, ROTATION_0, ROTATION_270, "Rotation constant");
        mSurfaceGroupId = surfaceGroupId;
        // A concrete Surface is provided up front, so the source class is not tracked.
        mSurfaceType = SURFACE_TYPE_UNKNOWN;
        mSurfaces = new ArrayList<Surface>();
        mSurfaces.add(surface);
        mRotation = rotation;
        // Snapshot the surface's size/format/dataspace/generation so later changes to the
        // Surface can be detected against this configured state.
        mConfiguredSize = SurfaceUtils.getSurfaceSize(surface);
        mConfiguredFormat = SurfaceUtils.getSurfaceFormat(surface);
        mConfiguredDataspace = SurfaceUtils.getSurfaceDataspace(surface);
        mConfiguredGenerationId = surface.getGenerationId();
        // Not a deferred configuration: the output Surface already exists.
        mIsDeferredConfig = false;
        mIsShared = false;
        mPhysicalCameraId = null;
        mIsMultiResolution = false;
        mSensorPixelModesUsed = new ArrayList<Integer>();
        mDynamicRangeProfile = DynamicRangeProfiles.STANDARD;
        mStreamUseCase = CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
        mTimestampBase = TIMESTAMP_BASE_DEFAULT;
        mMirrorMode = MIRROR_MODE_AUTO;
    }
537 
538     /**
539      * Create a list of {@link OutputConfiguration} instances for the outputs used by a
540      * {@link android.hardware.camera2.MultiResolutionImageReader}.
541      *
542      * <p>This constructor takes an argument for a
543      * {@link android.hardware.camera2.MultiResolutionImageReader}.</p>
544      *
545      * @param multiResolutionImageReader
546      *          The multi-resolution image reader object.
547      */
createInstancesForMultiResolutionOutput( @onNull MultiResolutionImageReader multiResolutionImageReader)548     public static @NonNull Collection<OutputConfiguration> createInstancesForMultiResolutionOutput(
549             @NonNull MultiResolutionImageReader multiResolutionImageReader)  {
550         checkNotNull(multiResolutionImageReader, "Multi-resolution image reader must not be null");
551 
552         int groupId = MULTI_RESOLUTION_GROUP_ID_COUNTER;
553         MULTI_RESOLUTION_GROUP_ID_COUNTER++;
554         // Skip in case the group id counter overflows to -1, the invalid value.
555         if (MULTI_RESOLUTION_GROUP_ID_COUNTER == -1) {
556             MULTI_RESOLUTION_GROUP_ID_COUNTER++;
557         }
558 
559         ImageReader[] imageReaders = multiResolutionImageReader.getReaders();
560         ArrayList<OutputConfiguration> configs = new ArrayList<OutputConfiguration>();
561         for (int i = 0; i < imageReaders.length; i++) {
562             MultiResolutionStreamInfo streamInfo =
563                     multiResolutionImageReader.getStreamInfoForImageReader(imageReaders[i]);
564 
565             OutputConfiguration config = new OutputConfiguration(
566                     groupId, imageReaders[i].getSurface());
567             config.setPhysicalCameraId(streamInfo.getPhysicalCameraId());
568             config.setMultiResolutionOutput();
569             configs.add(config);
570 
571             // No need to call addSensorPixelModeUsed for ultra high resolution sensor camera,
572             // because regular and max resolution output configurations are used for DEFAULT mode
573             // and MAX_RESOLUTION mode respectively by default.
574         }
575 
576         return configs;
577     }
578 
579     /**
580      * Create a new {@link OutputConfiguration} instance, with desired Surface size and Surface
581      * source class.
582      * <p>
583      * This constructor takes an argument for desired Surface size and the Surface source class
584      * without providing the actual output Surface. This is used to setup an output configuration
585      * with a deferred Surface. The application can use this output configuration to create a
586      * session.
587      * </p>
588      * <p>
589      * However, the actual output Surface must be set via {@link #addSurface} and the deferred
590      * Surface configuration must be finalized via {@link
591      * CameraCaptureSession#finalizeOutputConfigurations} before submitting a request with this
592      * Surface target. The deferred Surface can only be obtained either from {@link
593      * android.view.SurfaceView} by calling {@link android.view.SurfaceHolder#getSurface}, or from
594      * {@link android.graphics.SurfaceTexture} via
595      * {@link android.view.Surface#Surface(android.graphics.SurfaceTexture)}).
596      * </p>
597      *
598      * @param surfaceSize Size for the deferred surface.
599      * @param klass a non-{@code null} {@link Class} object reference that indicates the source of
600      *            this surface. Only {@link android.view.SurfaceHolder SurfaceHolder.class} and
601      *            {@link android.graphics.SurfaceTexture SurfaceTexture.class} are supported.
602      * @throws IllegalArgumentException if the Surface source class is not supported, or Surface
603      *         size is zero.
604      */
OutputConfiguration(@onNull Size surfaceSize, @NonNull Class<T> klass)605     public <T> OutputConfiguration(@NonNull Size surfaceSize, @NonNull Class<T> klass) {
606         checkNotNull(klass, "surfaceSize must not be null");
607         checkNotNull(klass, "klass must not be null");
608         if (klass == android.view.SurfaceHolder.class) {
609             mSurfaceType = SURFACE_TYPE_SURFACE_VIEW;
610         } else if (klass == android.graphics.SurfaceTexture.class) {
611             mSurfaceType = SURFACE_TYPE_SURFACE_TEXTURE;
612         } else {
613             mSurfaceType = SURFACE_TYPE_UNKNOWN;
614             throw new IllegalArgumentException("Unknow surface source class type");
615         }
616 
617         if (surfaceSize.getWidth() == 0 || surfaceSize.getHeight() == 0) {
618             throw new IllegalArgumentException("Surface size needs to be non-zero");
619         }
620 
621         mSurfaceGroupId = SURFACE_GROUP_ID_NONE;
622         mSurfaces = new ArrayList<Surface>();
623         mRotation = ROTATION_0;
624         mConfiguredSize = surfaceSize;
625         mConfiguredFormat = StreamConfigurationMap.imageFormatToInternal(ImageFormat.PRIVATE);
626         mConfiguredDataspace = StreamConfigurationMap.imageFormatToDataspace(ImageFormat.PRIVATE);
627         mConfiguredGenerationId = 0;
628         mIsDeferredConfig = true;
629         mIsShared = false;
630         mPhysicalCameraId = null;
631         mIsMultiResolution = false;
632         mSensorPixelModesUsed = new ArrayList<Integer>();
633         mDynamicRangeProfile = DynamicRangeProfiles.STANDARD;
634         mStreamUseCase = CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
635     }
636 
637     /**
638      * Enable multiple surfaces sharing the same OutputConfiguration
639      *
640      * <p>For advanced use cases, a camera application may require more streams than the combination
641      * guaranteed by {@link CameraDevice#createCaptureSession}. In this case, more than one
642      * compatible surface can be attached to an OutputConfiguration so that they map to one
643      * camera stream, and the outputs share memory buffers when possible. Due to buffer sharing
644      * clients should be careful when adding surface outputs that modify their input data. If such
645      * case exists, camera clients should have an additional mechanism to synchronize read and write
646      * access between individual consumers.</p>
647      *
648      * <p>Two surfaces are compatible in the below cases:</p>
649      *
     * <ul>
650      * <li> Surfaces with the same size, format, dataSpace, and Surface source class. In this case,
651      * {@link CameraDevice#createCaptureSessionByOutputConfigurations} is guaranteed to succeed.
652      *
653      * <li> Surfaces with the same size, format, and dataSpace, but different Surface source classes
654      * that are generally not compatible. However, on some devices, the underlying camera device is
655      * able to use the same buffer layout for both surfaces. The only way to discover if this is the
656      * case is to create a capture session with that output configuration. For example, if the
657      * camera device uses the same private buffer format between a SurfaceView/SurfaceTexture and a
658      * MediaRecorder/MediaCodec, {@link CameraDevice#createCaptureSessionByOutputConfigurations}
659      * will succeed. Otherwise, it fails with {@link
660      * CameraCaptureSession.StateCallback#onConfigureFailed}.
661      * </ul>
662      *
663      * <p>To enable surface sharing, this function must be called before {@link
664      * CameraDevice#createCaptureSessionByOutputConfigurations} or {@link
665      * CameraDevice#createReprocessableCaptureSessionByConfigurations}. Calling this function after
666      * {@link CameraDevice#createCaptureSessionByOutputConfigurations} has no effect.</p>
667      *
668      * <p>Up to {@link #getMaxSharedSurfaceCount} surfaces can be shared for an OutputConfiguration.
669      * The supported surfaces for sharing must be of type SurfaceTexture, SurfaceView,
670      * MediaRecorder, MediaCodec, or implementation defined ImageReader.</p>
671      *
672      * <p>This function must not be called from OutputConfigurations created by {@link
673      * #createInstancesForMultiResolutionOutput}.</p>
674      *
675      * @throws IllegalStateException If this OutputConfiguration is created via {@link
676      * #createInstancesForMultiResolutionOutput} to back a MultiResolutionImageReader.
677      */
enableSurfaceSharing()678     public void enableSurfaceSharing() {
679         if (mIsMultiResolution) {
680             throw new IllegalStateException("Cannot enable surface sharing on "
681                     + "multi-resolution output configurations");
682         }
683         mIsShared = true;
684     }
685 
686     /**
687      * Set the id of the physical camera for this OutputConfiguration
688      *
689      * <p>In the case one logical camera is made up of multiple physical cameras, it could be
690      * desirable for the camera application to request streams from individual physical cameras.
691      * This call achieves it by mapping the OutputConfiguration to the physical camera id.</p>
692      *
693      * <p>The valid physical camera ids can be queried by {@link
694      * CameraCharacteristics#getPhysicalCameraIds}.</p>
695      *
696      * <p>Passing in a null physicalCameraId means that the OutputConfiguration is for a logical
697      * stream.</p>
698      *
699      * <p>This function must be called before {@link
700      * CameraDevice#createCaptureSessionByOutputConfigurations} or {@link
701      * CameraDevice#createReprocessableCaptureSessionByConfigurations}. Calling this function
702      * after {@link CameraDevice#createCaptureSessionByOutputConfigurations} or {@link
703      * CameraDevice#createReprocessableCaptureSessionByConfigurations} has no effect.</p>
704      *
705      * <p>As of {@link android.os.Build.VERSION_CODES#S Android 12}, an image buffer from a
706      * physical camera stream can be used for reprocessing to logical camera streams and streams
707      * from the same physical camera if the camera device supports multi-resolution input and output
708      * streams. See {@link CameraCharacteristics#SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP}
709      * for details. The behaviors of reprocessing from a non-physical camera stream to a physical
710      * camera stream, and from a physical camera stream to a physical camera stream of different
711      * physical camera, are device-specific and not guaranteed to be supported.</p>
712      *
713      * <p>On prior API levels, the surface belonging to a physical camera OutputConfiguration must
714      * not be used as input or output of a reprocessing request. </p>
715      */
setPhysicalCameraId(@ullable String physicalCameraId)716     public void setPhysicalCameraId(@Nullable String physicalCameraId) {
717         mPhysicalCameraId = physicalCameraId;
718     }
719 
720     /**
721      * Add a sensor pixel mode that this OutputConfiguration will be used in.
722      *
723      * <p> In the case that this output stream configuration (format, width, height) is
724      * available through {@link android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP}
725      * configurations and
726      * {@link android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION},
727      * configurations, the camera sub-system will assume that this {@link OutputConfiguration} will
728      * be used only with {@link android.hardware.camera2.CaptureRequest}s which has
729      * {@link android.hardware.camera2.CaptureRequest#SENSOR_PIXEL_MODE} set to
730      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT}.
731      * In such cases, if clients intend to use the
732      * {@link OutputConfiguration}(s) in a {@link android.hardware.camera2.CaptureRequest} with
733      * other sensor pixel modes, they must specify which
734      * {@link android.hardware.camera2.CaptureRequest#SENSOR_PIXEL_MODE}(s) they will use this
735      * {@link OutputConfiguration} with, by calling this method.
736      *
737      * In case this output stream configuration (format, width, height) is only in
738      * {@link android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION},
739      * configurations, this output target must only be used with
740      * {@link android.hardware.camera2.CaptureRequest}s which has
741      * {@link android.hardware.camera2.CaptureRequest#SENSOR_PIXEL_MODE} set to
742      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION} and that
743      * is what the camera sub-system will assume. If clients add
744      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT} in this
745      * case, session configuration will fail, if this {@link OutputConfiguration} is included.
746      *
747      * In case this output stream configuration (format, width, height) is only in
748      * {@link android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP},
749      * configurations, this output target must only be used with
750      * {@link android.hardware.camera2.CaptureRequest}s which has
751      * {@link android.hardware.camera2.CaptureRequest#SENSOR_PIXEL_MODE} set to
752      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT} and that is what
753      * the camera sub-system will assume. If clients add
754      * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION} in this
755      * case, session configuration will fail, if this {@link OutputConfiguration} is included.
756      *
757      * @param sensorPixelModeUsed The sensor pixel mode this OutputConfiguration will be used with
758      * </p>
759      *
760      */
addSensorPixelModeUsed(@ensorPixelMode int sensorPixelModeUsed)761     public void addSensorPixelModeUsed(@SensorPixelMode int sensorPixelModeUsed) {
762         // Verify that the values are in range.
763         if (sensorPixelModeUsed != CameraMetadata.SENSOR_PIXEL_MODE_DEFAULT &&
764                 sensorPixelModeUsed != CameraMetadata.SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) {
765             throw new IllegalArgumentException("Not a valid sensor pixel mode " +
766                     sensorPixelModeUsed);
767         }
768 
769         if (mSensorPixelModesUsed.contains(sensorPixelModeUsed)) {
770             // Already added, ignore;
771             return;
772         }
773         mSensorPixelModesUsed.add(sensorPixelModeUsed);
774     }
775 
776     /**
777      * Remove a sensor pixel mode, previously added through addSensorPixelModeUsed, from this
778      * OutputConfiguration.
779      *
780      * <p> Sensor pixel modes added via calls to {@link #addSensorPixelModeUsed} can also be removed
781      * from the OutputConfiguration.</p>
782      *
783      * @param sensorPixelModeUsed The sensor pixel mode to be removed.
784      *
785      * @throws IllegalArgumentException If the sensor pixel mode wasn't previously added
786      *                                  through {@link #addSensorPixelModeUsed}.
787      */
removeSensorPixelModeUsed(@ensorPixelMode int sensorPixelModeUsed)788     public void removeSensorPixelModeUsed(@SensorPixelMode int sensorPixelModeUsed) {
789       if (!mSensorPixelModesUsed.remove(Integer.valueOf(sensorPixelModeUsed))) {
790             throw new IllegalArgumentException("sensorPixelMode " + sensorPixelModeUsed +
791                     "is not part of this output configuration");
792       }
793     }
794 
795     /**
796      * Check if this configuration is for a physical camera.
797      *
798      * <p>This returns true if the output configuration was for a physical camera making up a
799      * logical multi camera via {@link OutputConfiguration#setPhysicalCameraId}.</p>
800      * @hide
801      */
isForPhysicalCamera()802     public boolean isForPhysicalCamera() {
803         return (mPhysicalCameraId != null);
804     }
805 
806     /**
807      * Check if this configuration has deferred configuration.
808      *
809      * <p>This will return true if the output configuration was constructed with surface deferred by
810      * {@link OutputConfiguration#OutputConfiguration(Size, Class)}. It will return true even after
811      * the deferred surface is added later by {@link OutputConfiguration#addSurface}.</p>
812      *
813      * @return true if this configuration has deferred surface.
814      * @hide
815      */
isDeferredConfiguration()816     public boolean isDeferredConfiguration() {
817         return mIsDeferredConfig;
818     }
819 
820     /**
821      * Add a surface to this OutputConfiguration.
822      *
823      * <p> This function can be called before or after {@link
824      * CameraDevice#createCaptureSessionByOutputConfigurations}. If it's called after,
825      * the application must finalize the capture session with
826      * {@link CameraCaptureSession#finalizeOutputConfigurations}. It is possible to call this method
827      * after the output configurations have been finalized only in cases of enabled surface sharing
828      * see {@link #enableSurfaceSharing}. The modified output configuration must be updated with
829      * {@link CameraCaptureSession#updateOutputConfiguration}.</p>
830      *
831      * <p> If the OutputConfiguration was constructed with a deferred surface by {@link
832      * OutputConfiguration#OutputConfiguration(Size, Class)}, the added surface must be obtained
833      * from {@link android.view.SurfaceView} by calling {@link android.view.SurfaceHolder#getSurface},
834      * or from {@link android.graphics.SurfaceTexture} via
835      * {@link android.view.Surface#Surface(android.graphics.SurfaceTexture)}).</p>
836      *
837      * <p> If the OutputConfiguration was constructed by other constructors, the added
838      * surface must be compatible with the existing surface. See {@link #enableSurfaceSharing} for
839      * details of compatible surfaces.</p>
840      *
841      * <p> If the OutputConfiguration already contains a Surface, {@link #enableSurfaceSharing} must
842      * be called before calling this function to add a new Surface.</p>
843      *
844      * @param surface The surface to be added.
845      * @throws IllegalArgumentException if the Surface is invalid, the Surface's
846      *         dataspace/format doesn't match, or adding the Surface would exceed number of
847      *         shared surfaces supported.
848      * @throws IllegalStateException if the Surface was already added to this OutputConfiguration,
849      *         or if the OutputConfiguration is not shared and it already has a surface associated
850      *         with it.
851      */
addSurface(@onNull Surface surface)852     public void addSurface(@NonNull Surface surface) {
853         checkNotNull(surface, "Surface must not be null");
854         if (mSurfaces.contains(surface)) {
855             throw new IllegalStateException("Surface is already added!");
856         }
857         if (mSurfaces.size() == 1 && !mIsShared) {
858             throw new IllegalStateException("Cannot have 2 surfaces for a non-sharing configuration");
859         }
860         if (mSurfaces.size() + 1 > MAX_SURFACES_COUNT) {
861             throw new IllegalArgumentException("Exceeds maximum number of surfaces");
862         }
863 
864         // This will throw IAE is the surface was abandoned.
865         Size surfaceSize = SurfaceUtils.getSurfaceSize(surface);
866         if (!surfaceSize.equals(mConfiguredSize)) {
867             Log.w(TAG, "Added surface size " + surfaceSize +
868                     " is different than pre-configured size " + mConfiguredSize +
869                     ", the pre-configured size will be used.");
870         }
871 
872         if (mConfiguredFormat != SurfaceUtils.getSurfaceFormat(surface)) {
873             throw new IllegalArgumentException("The format of added surface format doesn't match");
874         }
875 
876         // If the surface format is PRIVATE, do not enforce dataSpace because camera device may
877         // override it.
878         if (mConfiguredFormat != ImageFormat.PRIVATE &&
879                 mConfiguredDataspace != SurfaceUtils.getSurfaceDataspace(surface)) {
880             throw new IllegalArgumentException("The dataspace of added surface doesn't match");
881         }
882 
883         mSurfaces.add(surface);
884     }
885 
886     /**
887      * Remove a surface from this OutputConfiguration.
888      *
889      * <p> Surfaces added via calls to {@link #addSurface} can also be removed from the
890      *  OutputConfiguration. The only notable exception is the surface associated with
891      *  the OutputConfiguration (see {@link #getSurface}) which was passed as part of the constructor
892      *  or was added first in the deferred case
893      *  {@link OutputConfiguration#OutputConfiguration(Size, Class)}.</p>
894      *
895      * @param surface The surface to be removed.
896      *
897      * @throws IllegalArgumentException If the surface is associated with this OutputConfiguration
898      *                                  (see {@link #getSurface}) or the surface didn't get added
899      *                                  with {@link #addSurface}.
900      */
removeSurface(@onNull Surface surface)901     public void removeSurface(@NonNull Surface surface) {
902         if (getSurface() == surface) {
903             throw new IllegalArgumentException(
904                     "Cannot remove surface associated with this output configuration");
905         }
906         if (!mSurfaces.remove(surface)) {
907             throw new IllegalArgumentException("Surface is not part of this output configuration");
908         }
909     }
910 
911     /**
912      * Set stream use case for this OutputConfiguration
913      *
914      * <p>Stream use case is used to describe the purpose of the stream, whether it's for live
915      * preview, still image capture, video recording, or their combinations. This flag is useful
916      * for scenarios where the immediate consumer target isn't sufficient to indicate the stream's
917      * usage.</p>
918      *
919      * <p>The main difference between stream use case and capture intent is that the former
920      * enables the camera device to optimize camera hardware and software pipelines based on user
921      * scenarios for each stream, whereas the latter is mainly a hint to camera to decide
922      * optimal 3A strategy that's applicable to the whole session. The camera device carries out
923      * configurations such as selecting tuning parameters, choosing camera sensor mode, and
924      * constructing image processing pipeline based on the streams' use cases. Capture intents are
925      * then used to fine tune 3A behaviors such as adjusting AE/AF convergence speed, and capture
926      * intents may change during the lifetime of a session. For example, for a session with a
927      * PREVIEW_VIDEO_STILL use case stream and a STILL_CAPTURE use case stream, the capture intents
928      * may be PREVIEW with fast 3A convergence speed and flash metering with automatic control for
929      * live preview, STILL_CAPTURE with best 3A parameters for still photo capture, or VIDEO_RECORD
930      * with slower 3A convergence speed for better video playback experience.</p>
931      *
932      * <p>The supported stream use cases supported by a camera device can be queried by
933      * {@link android.hardware.camera2.CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES}.</p>
934      *
935      * <p>The mandatory stream combinations involving stream use cases can be found at {@link
936      * android.hardware.camera2.CameraDevice#createCaptureSession}, as well as queried via
937      * {@link android.hardware.camera2.params.MandatoryStreamCombination}. The application is
938      * strongly recommended to select one of the guaranteed stream combinations where all streams'
939      * use cases are set to non-DEFAULT values. If the application chooses a stream combination
940      * not in the mandatory list, the camera device may ignore some use case flags due to
941      * hardware constraints or implementation details.</p>
942      *
943      * <p>This function must be called before {@link CameraDevice#createCaptureSession} or {@link
944      * CameraDevice#createCaptureSessionByOutputConfigurations}. Calling this function after
945      * {@link CameraDevice#createCaptureSession} or
946      * {@link CameraDevice#createCaptureSessionByOutputConfigurations} has no effect to the camera
947      * session.</p>
948      *
949      * @param streamUseCase The stream use case to be set.
950      *
951      * @throws IllegalArgumentException If the streamUseCase isn't within the range of valid
952      *                                  values.
953      */
setStreamUseCase(@treamUseCase long streamUseCase)954     public void setStreamUseCase(@StreamUseCase long streamUseCase) {
955         // Verify that the value is in range
956         long maxUseCaseValue = CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL;
957         if (streamUseCase > maxUseCaseValue &&
958                 streamUseCase < CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START) {
959             throw new IllegalArgumentException("Not a valid stream use case value " +
960                     streamUseCase);
961         }
962 
963         mStreamUseCase = streamUseCase;
964     }
965 
966     /**
967      * Get the current stream use case
968      *
969      * <p>If no {@link #setStreamUseCase} is called first, this function returns
970      * {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT DEFAULT}.</p>
971      *
972      * @return the currently set stream use case
973      */
getStreamUseCase()974     public long getStreamUseCase() {
975         return mStreamUseCase;
976     }
977 
978     /**
979      * Set timestamp base for this output target
980      *
981      * <p>Timestamp base describes the time domain of images from this
982      * camera output and its relationship with {@link
983      * CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE}.</p>
984      *
985      * <p>If this function is not called, the timestamp base for this output
986      * is {@link #TIMESTAMP_BASE_DEFAULT}, with which the camera device adjusts
987      * timestamps based on the output target.</p>
988      *
989      * <p>See {@link #TIMESTAMP_BASE_DEFAULT}, {@link #TIMESTAMP_BASE_SENSOR},
990      * and {@link #TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED} for details of each timestamp base.</p>
991      *
992      * @param timestampBase The timestamp base to be set.
993      *
994      * @throws IllegalArgumentException If the timestamp base isn't within the range of valid
995      *                                  values.
996      */
setTimestampBase(@imestampBase int timestampBase)997     public void setTimestampBase(@TimestampBase int timestampBase) {
998         // Verify that the value is in range
999         if (timestampBase < TIMESTAMP_BASE_DEFAULT ||
1000                 timestampBase > TIMESTAMP_BASE_READOUT_SENSOR) {
1001             throw new IllegalArgumentException("Not a valid timestamp base value " +
1002                     timestampBase);
1003         }
1004         mTimestampBase = timestampBase;
1005     }
1006 
1007     /**
1008      * Get the current timestamp base
1009      *
1010      * <p>If no {@link #setTimestampBase} is called first, this function returns
1011      * {@link #TIMESTAMP_BASE_DEFAULT}.</p>
1012      *
1013      * @return The currently set timestamp base
1014      */
getTimestampBase()1015     public @TimestampBase int getTimestampBase() {
1016         return mTimestampBase;
1017     }
1018 
1019     /**
1020      * Set the mirroring mode for this output target
1021      *
1022      * <p>If this function is not called, the mirroring mode for this output is
1023      * {@link #MIRROR_MODE_AUTO}, with which the camera API will mirror the output images
1024      * horizontally for front facing camera.</p>
1025      *
1026      * <p>For efficiency, the mirror effect is applied as a transform flag, so it is only effective
1027      * in some outputs. It works automatically for SurfaceView and TextureView outputs. For manual
1028      * use of SurfaceTexture, it is reflected in the value of
1029      * {@link android.graphics.SurfaceTexture#getTransformMatrix}. For other end points, such as
1030      * ImageReader, MediaRecorder, or MediaCodec, the mirror mode has no effect. If mirroring is
1031      * needed for such outputs, the application needs to mirror the image buffers itself before
1032      * passing them onward.</p>
1033      */
setMirrorMode(@irrorMode int mirrorMode)1034     public void setMirrorMode(@MirrorMode int mirrorMode) {
1035         // Verify that the value is in range
1036         if (mirrorMode < MIRROR_MODE_AUTO ||
1037                 mirrorMode > MIRROR_MODE_V) {
1038             throw new IllegalArgumentException("Not a valid mirror mode " + mirrorMode);
1039         }
1040         mMirrorMode = mirrorMode;
1041     }
1042 
1043     /**
1044      * Get the current mirroring mode
1045      *
1046      * <p>If no {@link #setMirrorMode} is called first, this function returns
1047      * {@link #MIRROR_MODE_AUTO}.</p>
1048      *
1049      * @return The currently set mirroring mode
1050      */
getMirrorMode()1051     public @MirrorMode int getMirrorMode() {
1052         return mMirrorMode;
1053     }
1054 
1055     /**
1056      * Create a new {@link OutputConfiguration} instance with another {@link OutputConfiguration}
1057      * instance.
1058      *
1059      * @param other Another {@link OutputConfiguration} instance to be copied.
1060      *
1061      * @hide
1062      */
    public OutputConfiguration(@NonNull OutputConfiguration other) {
        // Runtime check kept even though the parameter is annotated @NonNull;
        // the annotation alone is not enforced at runtime.
        if (other == null) {
            throw new IllegalArgumentException("OutputConfiguration shouldn't be null");
        }

        // Field-by-field copy of the other configuration's state.
        // NOTE(review): mSurfaces and mSensorPixelModesUsed are aliased, not
        // deep-copied -- mutations through either instance (e.g. addSurface)
        // are visible to both. Confirm this sharing is intentional before
        // relying on copies being independent.
        this.mSurfaces = other.mSurfaces;
        this.mRotation = other.mRotation;
        this.mSurfaceGroupId = other.mSurfaceGroupId;
        this.mSurfaceType = other.mSurfaceType;
        this.mConfiguredDataspace = other.mConfiguredDataspace;
        this.mConfiguredFormat = other.mConfiguredFormat;
        this.mConfiguredSize = other.mConfiguredSize;
        this.mConfiguredGenerationId = other.mConfiguredGenerationId;
        this.mIsDeferredConfig = other.mIsDeferredConfig;
        this.mIsShared = other.mIsShared;
        this.mPhysicalCameraId = other.mPhysicalCameraId;
        this.mIsMultiResolution = other.mIsMultiResolution;
        this.mSensorPixelModesUsed = other.mSensorPixelModesUsed;
        this.mDynamicRangeProfile = other.mDynamicRangeProfile;
        this.mStreamUseCase = other.mStreamUseCase;
        this.mTimestampBase = other.mTimestampBase;
        this.mMirrorMode = other.mMirrorMode;
    }
1086 
1087     /**
1088      * Create an OutputConfiguration from Parcel.
1089      */
OutputConfiguration(@onNull Parcel source)1090     private OutputConfiguration(@NonNull Parcel source) {
1091         int rotation = source.readInt();
1092         int surfaceSetId = source.readInt();
1093         int surfaceType = source.readInt();
1094         int width = source.readInt();
1095         int height = source.readInt();
1096         boolean isDeferred = source.readInt() == 1;
1097         boolean isShared = source.readInt() == 1;
1098         ArrayList<Surface> surfaces = new ArrayList<Surface>();
1099         source.readTypedList(surfaces, Surface.CREATOR);
1100         String physicalCameraId = source.readString();
1101         boolean isMultiResolutionOutput = source.readInt() == 1;
1102         int[] sensorPixelModesUsed = source.createIntArray();
1103         long streamUseCase = source.readLong();
1104 
1105         checkArgumentInRange(rotation, ROTATION_0, ROTATION_270, "Rotation constant");
1106         long dynamicRangeProfile = source.readLong();
1107         DynamicRangeProfiles.checkProfileValue(dynamicRangeProfile);
1108 
1109         int timestampBase = source.readInt();
1110         int mirrorMode = source.readInt();
1111 
1112         mSurfaceGroupId = surfaceSetId;
1113         mRotation = rotation;
1114         mSurfaces = surfaces;
1115         mConfiguredSize = new Size(width, height);
1116         mIsDeferredConfig = isDeferred;
1117         mIsShared = isShared;
1118         mSurfaces = surfaces;
1119         if (mSurfaces.size() > 0) {
1120             mSurfaceType = SURFACE_TYPE_UNKNOWN;
1121             mConfiguredFormat = SurfaceUtils.getSurfaceFormat(mSurfaces.get(0));
1122             mConfiguredDataspace = SurfaceUtils.getSurfaceDataspace(mSurfaces.get(0));
1123             mConfiguredGenerationId = mSurfaces.get(0).getGenerationId();
1124         } else {
1125             mSurfaceType = surfaceType;
1126             mConfiguredFormat = StreamConfigurationMap.imageFormatToInternal(ImageFormat.PRIVATE);
1127             mConfiguredDataspace =
1128                     StreamConfigurationMap.imageFormatToDataspace(ImageFormat.PRIVATE);
1129             mConfiguredGenerationId = 0;
1130         }
1131         mPhysicalCameraId = physicalCameraId;
1132         mIsMultiResolution = isMultiResolutionOutput;
1133         mSensorPixelModesUsed = convertIntArrayToIntegerList(sensorPixelModesUsed);
1134         mDynamicRangeProfile = dynamicRangeProfile;
1135         mStreamUseCase = streamUseCase;
1136         mTimestampBase = timestampBase;
1137         mMirrorMode = mirrorMode;
1138     }
1139 
1140     /**
1141      * Get the maximum supported shared {@link Surface} count.
1142      *
1143      * @return the maximum number of surfaces that can be added per each OutputConfiguration.
1144      *
1145      * @see #enableSurfaceSharing
1146      */
getMaxSharedSurfaceCount()1147     public int getMaxSharedSurfaceCount() {
1148         return MAX_SURFACES_COUNT;
1149     }
1150 
1151     /**
1152      * Get the {@link Surface} associated with this {@link OutputConfiguration}.
1153      *
1154      * If more than one surface is associated with this {@link OutputConfiguration}, return the
1155      * first one as specified in the constructor or {@link OutputConfiguration#addSurface}.
1156      */
getSurface()1157     public @Nullable Surface getSurface() {
1158         if (mSurfaces.size() == 0) {
1159             return null;
1160         }
1161 
1162         return mSurfaces.get(0);
1163     }
1164 
1165     /**
1166      * Get the immutable list of surfaces associated with this {@link OutputConfiguration}.
1167      *
1168      * @return the list of surfaces associated with this {@link OutputConfiguration} as specified in
1169      * the constructor and {@link OutputConfiguration#addSurface}. The list should not be modified.
1170      */
1171     @NonNull
getSurfaces()1172     public List<Surface> getSurfaces() {
1173         return Collections.unmodifiableList(mSurfaces);
1174     }
1175 
1176     /**
1177      * Get the rotation associated with this {@link OutputConfiguration}.
1178      *
1179      * @return the rotation associated with this {@link OutputConfiguration}.
1180      *         Value will be one of ROTATION_[0, 90, 180, 270]
1181      *
1182      * @hide
1183      */
1184     @SystemApi
getRotation()1185     public int getRotation() {
1186         return mRotation;
1187     }
1188 
1189     /**
1190      * Get the surface group ID associated with this {@link OutputConfiguration}.
1191      *
1192      * @return the surface group ID associated with this {@link OutputConfiguration}.
1193      *         The default value is {@value #SURFACE_GROUP_ID_NONE}.
1194      */
getSurfaceGroupId()1195     public int getSurfaceGroupId() {
1196         return mSurfaceGroupId;
1197     }
1198 
    // Parcelable.Creator used by the framework to unflatten
    // OutputConfiguration instances received over binder; delegates to the
    // private Parcel constructor.
    public static final @android.annotation.NonNull Parcelable.Creator<OutputConfiguration> CREATOR =
            new Parcelable.Creator<OutputConfiguration>() {
        @Override
        public OutputConfiguration createFromParcel(Parcel source) {
            return new OutputConfiguration(source);
        }

        @Override
        public OutputConfiguration[] newArray(int size) {
            return new OutputConfiguration[size];
        }
    };
1211 
    @Override
    public int describeContents() {
        // No special objects (e.g. file descriptors) are flattened into the parcel.
        return 0;
    }
1216 
convertIntegerToIntList(List<Integer> integerList)1217     private static int[] convertIntegerToIntList(List<Integer> integerList) {
1218         int[] integerArray = new int[integerList.size()];
1219         for (int i = 0; i < integerList.size(); i++) {
1220             integerArray[i] = integerList.get(i);
1221         }
1222         return integerArray;
1223     }
1224 
convertIntArrayToIntegerList(int[] intArray)1225     private static ArrayList<Integer> convertIntArrayToIntegerList(int[] intArray) {
1226         ArrayList<Integer> integerList = new ArrayList<Integer>();
1227         if (intArray == null) {
1228             return integerList;
1229         }
1230         for (int i = 0; i < intArray.length; i++) {
1231             integerList.add(intArray[i]);
1232         }
1233         return integerList;
1234     }
1235 
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        if (dest == null) {
            throw new IllegalArgumentException("dest must not be null");
        }
        // NOTE(review): the write order below must stay in sync with the Parcel-based
        // constructor used by CREATOR (not visible here) — reordering breaks inflation.
        dest.writeInt(mRotation);
        dest.writeInt(mSurfaceGroupId);
        dest.writeInt(mSurfaceType);
        dest.writeInt(mConfiguredSize.getWidth());
        dest.writeInt(mConfiguredSize.getHeight());
        // Booleans are encoded as 0/1 ints.
        dest.writeInt(mIsDeferredConfig ? 1 : 0);
        dest.writeInt(mIsShared ? 1 : 0);
        dest.writeTypedList(mSurfaces);
        dest.writeString(mPhysicalCameraId);
        dest.writeInt(mIsMultiResolution ? 1 : 0);
        // writeList doesn't seem to work well with Integer list.
        dest.writeIntArray(convertIntegerToIntList(mSensorPixelModesUsed));
        dest.writeLong(mDynamicRangeProfile);
        dest.writeLong(mStreamUseCase);
        dest.writeInt(mTimestampBase);
        dest.writeInt(mMirrorMode);
    }
1258 
1259     /**
1260      * Check if this {@link OutputConfiguration} is equal to another {@link OutputConfiguration}.
1261      *
1262      * <p>Two output configurations are only equal if and only if the underlying surfaces, surface
1263      * properties (width, height, format, dataspace) when the output configurations are created,
1264      * and all other configuration parameters are equal. </p>
1265      *
1266      * @return {@code true} if the objects were equal, {@code false} otherwise
1267      */
1268     @Override
equals(@ullable Object obj)1269     public boolean equals(@Nullable Object obj) {
1270         if (obj == null) {
1271             return false;
1272         } else if (this == obj) {
1273             return true;
1274         } else if (obj instanceof OutputConfiguration) {
1275             final OutputConfiguration other = (OutputConfiguration) obj;
1276             if (mRotation != other.mRotation ||
1277                     !mConfiguredSize.equals(other.mConfiguredSize) ||
1278                     mConfiguredFormat != other.mConfiguredFormat ||
1279                     mSurfaceGroupId != other.mSurfaceGroupId ||
1280                     mSurfaceType != other.mSurfaceType ||
1281                     mIsDeferredConfig != other.mIsDeferredConfig ||
1282                     mIsShared != other.mIsShared ||
1283                     mConfiguredFormat != other.mConfiguredFormat ||
1284                     mConfiguredDataspace != other.mConfiguredDataspace ||
1285                     mConfiguredGenerationId != other.mConfiguredGenerationId ||
1286                     !Objects.equals(mPhysicalCameraId, other.mPhysicalCameraId) ||
1287                     mIsMultiResolution != other.mIsMultiResolution ||
1288                     mStreamUseCase != other.mStreamUseCase ||
1289                     mTimestampBase != other.mTimestampBase ||
1290                     mMirrorMode != other.mMirrorMode)
1291                 return false;
1292             if (mSensorPixelModesUsed.size() != other.mSensorPixelModesUsed.size()) {
1293                 return false;
1294             }
1295             for (int j = 0; j < mSensorPixelModesUsed.size(); j++) {
1296                 if (mSensorPixelModesUsed.get(j) != other.mSensorPixelModesUsed.get(j)) {
1297                     return false;
1298                 }
1299             }
1300             int minLen = Math.min(mSurfaces.size(), other.mSurfaces.size());
1301             for (int i = 0;  i < minLen; i++) {
1302                 if (mSurfaces.get(i) != other.mSurfaces.get(i))
1303                     return false;
1304             }
1305             if (mDynamicRangeProfile != other.mDynamicRangeProfile) {
1306                 return false;
1307             }
1308 
1309             return true;
1310         }
1311         return false;
1312     }
1313 
    /**
     * {@inheritDoc}
     */
    @Override
    public int hashCode() {
        // Need ensure that the hashcode remains unchanged after adding a deferred surface. Otherwise
        // the deferred output configuration will be lost in the camera streammap after the deferred
        // surface is set.
        if (mIsDeferredConfig) {
            // Deliberately omits mSurfaces and mConfiguredGenerationId: both change once
            // the deferred surface is attached, which would alter the hash.
            return HashCodeHelpers.hashCode(
                    mRotation, mConfiguredSize.hashCode(), mConfiguredFormat, mConfiguredDataspace,
                    mSurfaceGroupId, mSurfaceType, mIsShared ? 1 : 0,
                    mPhysicalCameraId == null ? 0 : mPhysicalCameraId.hashCode(),
                    mIsMultiResolution ? 1 : 0, mSensorPixelModesUsed.hashCode(),
                    mDynamicRangeProfile, mStreamUseCase, mTimestampBase, mMirrorMode);
        }

        // Non-deferred configurations hash every identity-relevant field, including the
        // surface list and its generation id.
        return HashCodeHelpers.hashCode(
                mRotation, mSurfaces.hashCode(), mConfiguredGenerationId,
                mConfiguredSize.hashCode(), mConfiguredFormat,
                mConfiguredDataspace, mSurfaceGroupId, mIsShared ? 1 : 0,
                mPhysicalCameraId == null ? 0 : mPhysicalCameraId.hashCode(),
                mIsMultiResolution ? 1 : 0, mSensorPixelModesUsed.hashCode(),
                mDynamicRangeProfile, mStreamUseCase, mTimestampBase,
                mMirrorMode);
    }
1340 
    // Log tag for this class.
    private static final String TAG = "OutputConfiguration";

    // A surfaceGroupId counter used for MultiResolutionImageReader. Its value is
    // incremented everytime {@link createInstancesForMultiResolutionOutput} is called.
    private static int MULTI_RESOLUTION_GROUP_ID_COUNTER = 0;

    // Surfaces backing this configuration. Mutable (non-final) because surfaces can be
    // added after construction via {@link #addSurface}.
    private ArrayList<Surface> mSurfaces;
    // Rotation applied to this output; one of ROTATION_[0, 90, 180, 270].
    private final int mRotation;
    // Surface group this output belongs to; SURFACE_GROUP_ID_NONE by default.
    private final int mSurfaceGroupId;
    // Surface source type, this is only used by the deferred surface configuration objects.
    private final int mSurfaceType;

    // The size, format, and dataspace of the surface when OutputConfiguration is created.
    private final Size mConfiguredSize;
    private final int mConfiguredFormat;
    private final int mConfiguredDataspace;
    // Surface generation ID to distinguish changes to Surface native internals
    private final int mConfiguredGenerationId;
    // Flag indicating if this config has deferred surface.
    private final boolean mIsDeferredConfig;
    // Flag indicating if this config has shared surfaces
    private boolean mIsShared;
    // The physical camera id that this output configuration is for.
    private String mPhysicalCameraId;
    // Flag indicating if this config is for a multi-resolution output with a
    // MultiResolutionImageReader
    private boolean mIsMultiResolution;
    // The sensor pixel modes that this OutputConfiguration will use
    private ArrayList<Integer> mSensorPixelModesUsed;
    // Dynamic range profile
    private long mDynamicRangeProfile;
    // Stream use case
    private long mStreamUseCase;
    // Timestamp base
    private int mTimestampBase;
    // Mirroring mode
    private int mMirrorMode;
}
1379