/*
 * Copyright 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package androidx.camera.camera2.pipe

import android.hardware.camera2.params.OutputConfiguration
import android.util.Size
import androidx.annotation.RequiresApi
import androidx.annotation.RestrictTo
import androidx.camera.camera2.pipe.OutputStream.DynamicRangeProfile.Companion.STANDARD
import androidx.camera.camera2.pipe.OutputStream.MirrorMode.Companion.MIRROR_MODE_AUTO
import androidx.camera.camera2.pipe.OutputStream.StreamUseCase.Companion.DEFAULT
import androidx.camera.camera2.pipe.OutputStream.TimestampBase.Companion.TIMESTAMP_BASE_DEFAULT
import androidx.camera.camera2.pipe.compat.Api33Compat

/**
 * A [CameraStream] is used on a [CameraGraph] to control what outputs that graph produces.
 * - Each [CameraStream] must have a surface associated with it in the [CameraGraph]. This surface
 *   may be changed, although this may cause the camera to stall and reconfigure.
 * - [CameraStream]s may be added to [Request]s that are sent to the [CameraGraph]. This causes the
 *   associated surface to be used by the camera to produce one or more of the outputs (defined by
 *   [outputs]).
 *
 * A [CameraStream] may be configured in several different ways, with the requirement that each
 * [CameraStream] may only represent a single surface that is sent to Camera2, and that each
 * [CameraStream] must produce one or more distinct outputs.
 *
 * There are three main components that will be wired together: the [CameraStream], the Camera2
 * [OutputConfiguration], and the [OutputStream]s. In each of these examples a [CameraStream] is
 * associated with a distinct surface that may be sent to camera2 to produce one or more distinct
 * outputs defined in the list of [OutputStream]s.
 *
 * Simple 1:1 configuration
 *
 *   ```
 *   CameraStream-1 -> OutputConfig-1 -> OutputStream-1
 *   CameraStream-2 -> OutputConfig-2 -> OutputStream-2
 *   ```
 *
 * Stream sharing (Multiple surfaces use the same OutputConfig object)
 *
 *   ```
 *   CameraStream-1 --------------------> OutputStream-1
 *                  >- OutputConfig-1 -<
 *   CameraStream-2 --------------------> OutputStream-2
 *   ```
 *
 * Multi-Output / External OutputConfiguration (Camera2 may produce one or more of the outputs)
 *
 *   ```
 *   CameraStream-1 -> OutputConfig-1 -> OutputStream-1
 *                 \-> OutputConfig-2 -> OutputStream-2
 *   ```
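 *
 * A minimal sketch of the simple 1:1 case, for illustration only (the [StreamFormat] constants
 * and the surrounding [CameraGraph] setup are assumptions, not requirements of this API):
 *
 *   ```
 *   // Each CameraStream.Config below maps to exactly one OutputStream once the graph is built.
 *   val viewfinderConfig = CameraStream.Config.create(
 *       size = Size(1920, 1080),
 *       format = StreamFormat.UNKNOWN, // Assumed placeholder; pick the format your output needs.
 *   )
 *   val captureConfig = CameraStream.Config.create(
 *       size = Size(4032, 3024),
 *       format = StreamFormat.JPEG, // Assumed constant for a still-capture output.
 *   )
 *   // Both configs would then be passed to the CameraGraph configuration (not shown here).
 *   ```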
 */
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
public class CameraStream
internal constructor(public val id: StreamId, public val outputs: List<OutputStream>) {
    override fun toString(): String = id.toString()

    /** Configuration that may be used to define a [CameraStream] on a [CameraGraph] */
    public class Config
    internal constructor(
        public val outputs: List<OutputStream.Config>,
        public val imageSourceConfig: ImageSourceConfig? = null
    ) {
        init {
            val firstOutput = outputs.first()
            check(outputs.all { it.format == firstOutput.format }) {
                "All outputs must have the same format!"
            }
        }

        public companion object {
            /** Create a simple [CameraStream] to [OutputStream] configuration */
            public fun create(
                size: Size,
                format: StreamFormat,
                camera: CameraId? = null,
                outputType: OutputStream.OutputType = OutputStream.OutputType.SURFACE,
                mirrorMode: OutputStream.MirrorMode? = null,
                timestampBase: OutputStream.TimestampBase? = null,
                dynamicRangeProfile: OutputStream.DynamicRangeProfile? = null,
                streamUseCase: OutputStream.StreamUseCase? = null,
                streamUseHint: OutputStream.StreamUseHint? = null,
                sensorPixelModes: List<OutputStream.SensorPixelMode> = emptyList(),
                imageSourceConfig: ImageSourceConfig? = null,
            ): Config =
                create(
                    OutputStream.Config.create(
                        size,
                        format,
                        camera,
                        outputType,
                        mirrorMode,
                        timestampBase,
                        dynamicRangeProfile,
                        streamUseCase,
                        streamUseHint,
                        sensorPixelModes,
                    ),
                    imageSourceConfig
                )

            /**
             * Create a simple [CameraStream] using a previously defined [OutputStream.Config]. This
             * allows multiple [CameraStream]s to share the same [OutputConfiguration].
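             *
             * A minimal sketch of sharing a single output definition, for illustration only (the
             * size and format values are assumptions):
             *
             * ```
             * // One OutputStream.Config shared by two CameraStreams -> one OutputConfiguration.
             * val sharedOutput = OutputStream.Config.create(Size(1920, 1080), StreamFormat.UNKNOWN)
             * val streamConfigA = CameraStream.Config.create(sharedOutput)
             * val streamConfigB = CameraStream.Config.create(sharedOutput)
             * ```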
             */
            public fun create(
                output: OutputStream.Config,
                imageSourceConfig: ImageSourceConfig? = null
            ): Config = Config(listOf(output), imageSourceConfig)

            /**
             * Create a [CameraStream] from multiple [OutputStream.Config]s. This is used to
             * define a [CameraStream] that may produce one or more of the outputs when used in a
             * request to the camera.
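             *
             * A minimal sketch, for illustration only (sizes and the format constant are
             * assumptions; note that all outputs must share the same format):
             *
             * ```
             * val streamConfig = CameraStream.Config.create(
             *     listOf(
             *         OutputStream.Config.create(Size(1920, 1080), StreamFormat.UNKNOWN),
             *         OutputStream.Config.create(Size(1280, 720), StreamFormat.UNKNOWN),
             *     )
             * )
             * ```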
             */
            public fun create(
                outputs: List<OutputStream.Config>,
                imageSourceConfig: ImageSourceConfig? = null
            ): Config = Config(outputs, imageSourceConfig)
        }
    }
}

/**
 * This identifies a single surface that is used to tell the camera to produce one or more outputs.
 */
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
@JvmInline
public value class StreamId(public val value: Int) {
    override fun toString(): String = "Stream-$value"
}

/**
 * An [OutputStream] represents one of the possible outputs that may be produced from a
 * [CameraStream]. Because some sensors are capable of producing images at different resolutions,
 * the underlying HAL on the device may produce different-sized images for the same request. This
 * represents one of those potential outputs.
 */
@JvmDefaultWithCompatibility
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
public interface OutputStream {
    // Every output comes from one, and exactly one, CameraStream
    public val stream: CameraStream

    public val id: OutputId
    public val size: Size
    public val format: StreamFormat
    public val camera: CameraId
    public val mirrorMode: MirrorMode?
    public val timestampBase: TimestampBase?
    public val dynamicRangeProfile: DynamicRangeProfile?
    public val streamUseCase: StreamUseCase?
    public val outputType: OutputType?
    public val streamUseHint: StreamUseHint?

    // TODO: Consider adding sensor mode and/or other metadata

    /**
     * Configuration object that provides the parameters for a specific input or output stream on
     * the camera.
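     *
     * A minimal sketch of a configuration that sets some of the optional parameters, for
     * illustration only (the format constant and chosen values are assumptions):
     *
     * ```
     * val previewOutput = OutputStream.Config.create(
     *     size = Size(1920, 1080),
     *     format = StreamFormat.UNKNOWN,
     *     mirrorMode = OutputStream.MirrorMode.MIRROR_MODE_AUTO,
     *     timestampBase = OutputStream.TimestampBase.TIMESTAMP_BASE_DEFAULT,
     *     dynamicRangeProfile = OutputStream.DynamicRangeProfile.STANDARD,
     * )
     * ```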
     */
    public sealed class Config(
        public val size: Size,
        public val format: StreamFormat,
        public val camera: CameraId?,
        public val mirrorMode: MirrorMode?,
        public val timestampBase: TimestampBase?,
        public val dynamicRangeProfile: DynamicRangeProfile?,
        public val streamUseCase: StreamUseCase?,
        public val streamUseHint: StreamUseHint?,
        public val sensorPixelModes: List<SensorPixelMode>,
    ) {
        public companion object {
            public fun create(
                size: Size,
                format: StreamFormat,
                camera: CameraId? = null,
                outputType: OutputType = OutputType.SURFACE,
                mirrorMode: MirrorMode? = null,
                timestampBase: TimestampBase? = null,
                dynamicRangeProfile: DynamicRangeProfile? = null,
                streamUseCase: StreamUseCase? = null,
                streamUseHint: StreamUseHint? = null,
                sensorPixelModes: List<SensorPixelMode> = emptyList(),
            ): Config =
                if (
                    outputType == OutputType.SURFACE_TEXTURE ||
                        outputType == OutputType.SURFACE_VIEW
                ) {
                    LazyOutputConfig(
                        size,
                        format,
                        camera,
                        outputType,
                        mirrorMode,
                        timestampBase,
                        dynamicRangeProfile,
                        streamUseCase,
                        streamUseHint,
                        sensorPixelModes,
                    )
                } else {
                    check(outputType == OutputType.SURFACE)
                    SimpleOutputConfig(
                        size,
                        format,
                        camera,
                        mirrorMode,
                        timestampBase,
                        dynamicRangeProfile,
                        streamUseCase,
                        streamUseHint,
                        sensorPixelModes,
                    )
                }

            /** Create a stream configuration from an externally created [OutputConfiguration] */
            @RequiresApi(33)
            public fun external(
                size: Size,
                format: StreamFormat,
                camera: CameraId? = null,
                externalOutputConfig: OutputConfiguration,
                streamUseHint: StreamUseHint?,
                sensorPixelModes: List<SensorPixelMode> = emptyList(),
            ): Config {
                return ExternalOutputConfig(
                    size,
                    format,
                    camera,
                    output = externalOutputConfig,
                    streamUseHint,
                    sensorPixelModes,
                )
            }
        }

        /** Most outputs only need to define size, format, and cameraId. */
        internal class SimpleOutputConfig(
            size: Size,
            format: StreamFormat,
            camera: CameraId?,
            mirrorMode: MirrorMode?,
            timestampBase: TimestampBase?,
            dynamicRangeProfile: DynamicRangeProfile?,
            streamUseCase: StreamUseCase?,
            streamUseHint: StreamUseHint?,
            sensorPixelModes: List<SensorPixelMode>,
        ) :
            Config(
                size,
                format,
                camera,
                mirrorMode,
                timestampBase,
                dynamicRangeProfile,
                streamUseCase,
                streamUseHint,
                sensorPixelModes,
            )

        /**
         * Used to configure an output with a surface that may be provided after the camera is
         * running.
         *
         * This behavior is allowed on newer versions of the OS and allows the camera to start
         * running before the UI is fully available. This configuration mode is only allowed for
         * SurfaceHolder and SurfaceTexture output targets, and must be defined ahead of time
         * (along with the size and format) for these [OutputConfiguration]s to be created.
         */
        internal class LazyOutputConfig(
            size: Size,
            format: StreamFormat,
            camera: CameraId?,
            internal val outputType: OutputType,
            mirrorMode: MirrorMode?,
            timestampBase: TimestampBase?,
            dynamicRangeProfile: DynamicRangeProfile?,
            streamUseCase: StreamUseCase?,
            streamUseHint: StreamUseHint?,
            sensorPixelModes: List<SensorPixelMode>,
        ) :
            Config(
                size,
                format,
                camera,
                mirrorMode,
                timestampBase,
                dynamicRangeProfile,
                streamUseCase,
                streamUseHint,
                sensorPixelModes,
            )

        /**
         * Used to define an output that comes from an externally managed OutputConfiguration
         * object.
         *
         * The configuration logic has the following behavior:
         * - Assumes [OutputConfiguration] has a valid surface
         * - Assumes [OutputConfiguration] surfaces will not be added / removed / changed.
         * - If the CameraCaptureSession must be recreated, the [OutputConfiguration] will be
         *   reused.
         */
        @RequiresApi(33)
        internal class ExternalOutputConfig(
            size: Size,
            format: StreamFormat,
            camera: CameraId?,
            val output: OutputConfiguration,
            streamUseHint: StreamUseHint?,
            sensorPixelModes: List<SensorPixelMode>,
        ) :
            Config(
                size,
                format,
                camera,
                MirrorMode(Api33Compat.getMirrorMode(output)),
                TimestampBase(Api33Compat.getTimestampBase(output)),
                DynamicRangeProfile(Api33Compat.getDynamicRangeProfile(output)),
                StreamUseCase(Api33Compat.getStreamUseCase(output)),
                streamUseHint,
                sensorPixelModes,
            )
    }

    public class OutputType private constructor() {
        public companion object {
            public val SURFACE: OutputType = OutputType()
            public val SURFACE_VIEW: OutputType = OutputType()
            public val SURFACE_TEXTURE: OutputType = OutputType()
        }
    }

    /**
     * Adds the ability to define the mirrorMode of the OutputStream. [MIRROR_MODE_AUTO] is the
     * default mirroring mode for the camera device. With this mode, the camera output is mirrored
     * horizontally for front-facing cameras, and there is no mirroring for rear-facing and external
     * cameras.
     *
     * See the documentation on [OutputConfiguration.setMirrorMode] for more details.
     */
    @JvmInline
    public value class MirrorMode(public val value: Int) {
        public companion object {
            public val MIRROR_MODE_AUTO: MirrorMode = MirrorMode(0)
            public val MIRROR_MODE_NONE: MirrorMode = MirrorMode(1)
            public val MIRROR_MODE_H: MirrorMode = MirrorMode(2)
            public val MIRROR_MODE_V: MirrorMode = MirrorMode(3)
        }
    }

    /**
     * Adds the ability to define the timestamp base of the OutputStream. [TIMESTAMP_BASE_DEFAULT]
     * is the default timestamp base, with which the camera device adjusts timestamps based on the
     * output target.
     *
     * See the documentation on [OutputConfiguration.setTimestampBase] for more details.
     */
    @JvmInline
    public value class TimestampBase(public val value: Int) {
        public companion object {
            public val TIMESTAMP_BASE_DEFAULT: TimestampBase = TimestampBase(0)
            public val TIMESTAMP_BASE_SENSOR: TimestampBase = TimestampBase(1)
            public val TIMESTAMP_BASE_MONOTONIC: TimestampBase = TimestampBase(2)
            public val TIMESTAMP_BASE_REALTIME: TimestampBase = TimestampBase(3)
            public val TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED: TimestampBase = TimestampBase(4)
        }
    }

    /**
     * Adds the ability to define the dynamic range profile of the OutputStream. [STANDARD] is the
     * default dynamic range profile for the camera device, with which the camera device uses an
     * 8-bit standard profile.
     *
     * See the documentation on [OutputConfiguration.setDynamicRangeProfile] for more details.
     */
    @JvmInline
    public value class DynamicRangeProfile(public val value: Long) {
        public companion object {
            public val STANDARD: DynamicRangeProfile = DynamicRangeProfile(1)
            public val HLG10: DynamicRangeProfile = DynamicRangeProfile(2)
            public val HDR10: DynamicRangeProfile = DynamicRangeProfile(4)
            public val HDR10_PLUS: DynamicRangeProfile = DynamicRangeProfile(8)
            public val DOLBY_VISION_10B_HDR_REF: DynamicRangeProfile = DynamicRangeProfile(16)
            public val DOLBY_VISION_10B_HDR_REF_PO: DynamicRangeProfile = DynamicRangeProfile(32)
            public val DOLBY_VISION_10B_HDR_OEM: DynamicRangeProfile = DynamicRangeProfile(64)
            public val DOLBY_VISION_10B_HDR_OEM_PO: DynamicRangeProfile = DynamicRangeProfile(128)
            public val DOLBY_VISION_8B_HDR_REF: DynamicRangeProfile = DynamicRangeProfile(256)
            public val DOLBY_VISION_8B_HDR_REF_PO: DynamicRangeProfile = DynamicRangeProfile(512)
            public val DOLBY_VISION_8B_HDR_OEM: DynamicRangeProfile = DynamicRangeProfile(1024)
            public val DOLBY_VISION_8B_HDR_OEM_PO: DynamicRangeProfile = DynamicRangeProfile(2048)
            public val PUBLIC_MAX: DynamicRangeProfile = DynamicRangeProfile(4096)
        }
    }

    /**
     * Until all devices can support StreamUseCases and edge cases are resolved, [StreamUseHint] can
     * temporarily be used to give a hint on the purpose of the stream.
     */
    @JvmInline
    public value class StreamUseHint(public val value: Long) {

        public companion object {
            public val DEFAULT: StreamUseHint = StreamUseHint(0)
            public val VIDEO_RECORD: StreamUseHint = StreamUseHint(1)
        }
    }

    /**
     * Adds the ability to define the stream-specific use case of the OutputStream. [DEFAULT] is the
     * default stream use case, with which the camera device uses the properties of the output
     * target, such as format, dataSpace, or surface class type, to optimize the image processing
     * pipeline.
     *
     * See the documentation on [OutputConfiguration.setStreamUseCase] for more details.
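     *
     * A minimal sketch of requesting a specific use case, for illustration only (the size and
     * format values are assumptions):
     *
     * ```
     * val recordingOutput = OutputStream.Config.create(
     *     size = Size(3840, 2160),
     *     format = StreamFormat.UNKNOWN,
     *     streamUseCase = OutputStream.StreamUseCase.VIDEO_RECORD,
     * )
     * ```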
     */
    @JvmInline
    public value class StreamUseCase(public val value: Long) {
        public companion object {
            public val DEFAULT: StreamUseCase = StreamUseCase(0)
            public val PREVIEW: StreamUseCase = StreamUseCase(1)
            public val STILL_CAPTURE: StreamUseCase = StreamUseCase(2)
            public val VIDEO_RECORD: StreamUseCase = StreamUseCase(3)
            public val PREVIEW_VIDEO_STILL: StreamUseCase = StreamUseCase(4)
            public val VIDEO_CALL: StreamUseCase = StreamUseCase(5)
            public val CROPPED_RAW: StreamUseCase = StreamUseCase(6)
        }
    }

    /**
     * Used to set the sensor pixel mode the OutputStream will be used in.
     *
     * See the documentation on [OutputConfiguration.addSensorPixelModeUsed] for more details.
     */
    @JvmInline
    public value class SensorPixelMode(public val value: Int) {
        public companion object {
            public val DEFAULT: SensorPixelMode = SensorPixelMode(0)
            public val MAXIMUM_RESOLUTION: SensorPixelMode = SensorPixelMode(1)
        }
    }

    /**
     * Returns whether this OutputStream is a valid stream for HIGH_SPEED recording. The
     * requirement is that the surface must be either a video encoder surface or a preview surface.
     * The checks below can be used to ensure that we are passing along the right intention for any
     * further checks when actually configuring and using this stream.
     *
     * [Camera2 reference]
     * [https://developer.android.com/reference/android/hardware/camera2/CameraDevice#constrained-high-speed-recording]
     */
    public fun isValidForHighSpeedOperatingMode(): Boolean {
        return this.streamUseCase == null ||
            this.streamUseCase == DEFAULT ||
            this.streamUseCase == StreamUseCase.PREVIEW ||
            this.streamUseCase == StreamUseCase.VIDEO_RECORD ||
            this.streamUseHint == null ||
            this.streamUseHint == StreamUseHint.DEFAULT ||
            this.streamUseHint == StreamUseHint.VIDEO_RECORD
    }
}

/** Configuration for a CameraStream that will be internally configured to produce images. */
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
public class ImageSourceConfig(
    public val capacity: Int,
    public val usageFlags: Long? = null,
    public val defaultDataSpace: Int? = null,
    public val defaultHardwareBufferFormat: Int? = null
)

/** This identifies a single output. */
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
@JvmInline
public value class OutputId(public val value: Int) {
    override fun toString(): String = "Output-$value"
}

/** Configuration for defining the properties of a Camera2 InputStream for reprocessing requests. */
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
public interface InputStream {
    public val id: InputStreamId
    public val maxImages: Int
    public val format: StreamFormat

    public class Config(
        public val stream: CameraStream.Config,
        public val maxImages: Int,
        public var streamFormat: StreamFormat
    )
}

/** This identifies a single input. */
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
@JvmInline
public value class InputStreamId(public val value: Int) {
    override fun toString(): String = "Input-$value"
}