1 /*
2  * Copyright 2020 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package androidx.camera.video;
18 
19 import static androidx.camera.core.CameraEffect.VIDEO_CAPTURE;
20 import static androidx.camera.core.impl.ImageFormatConstants.INTERNAL_DEFINED_IMAGE_FORMAT_PRIVATE;
21 import static androidx.camera.core.impl.ImageInputConfig.OPTION_INPUT_DYNAMIC_RANGE;
22 import static androidx.camera.core.impl.ImageOutputConfig.OPTION_CUSTOM_ORDERED_RESOLUTIONS;
23 import static androidx.camera.core.impl.ImageOutputConfig.OPTION_DEFAULT_RESOLUTION;
24 import static androidx.camera.core.impl.ImageOutputConfig.OPTION_MAX_RESOLUTION;
25 import static androidx.camera.core.impl.ImageOutputConfig.OPTION_MIRROR_MODE;
26 import static androidx.camera.core.impl.ImageOutputConfig.OPTION_RESOLUTION_SELECTOR;
27 import static androidx.camera.core.impl.ImageOutputConfig.OPTION_SUPPORTED_RESOLUTIONS;
28 import static androidx.camera.core.impl.ImageOutputConfig.OPTION_TARGET_ROTATION;
29 import static androidx.camera.core.impl.SessionConfig.SESSION_TYPE_HIGH_SPEED;
30 import static androidx.camera.core.impl.SessionConfig.SESSION_TYPE_REGULAR;
31 import static androidx.camera.core.impl.StreamSpec.FRAME_RATE_RANGE_UNSPECIFIED;
32 import static androidx.camera.core.impl.UseCaseConfig.OPTION_CAPTURE_CONFIG_UNPACKER;
33 import static androidx.camera.core.impl.UseCaseConfig.OPTION_CAPTURE_TYPE;
34 import static androidx.camera.core.impl.UseCaseConfig.OPTION_DEFAULT_CAPTURE_CONFIG;
35 import static androidx.camera.core.impl.UseCaseConfig.OPTION_DEFAULT_SESSION_CONFIG;
36 import static androidx.camera.core.impl.UseCaseConfig.OPTION_HIGH_RESOLUTION_DISABLED;
37 import static androidx.camera.core.impl.UseCaseConfig.OPTION_SESSION_CONFIG_UNPACKER;
38 import static androidx.camera.core.impl.UseCaseConfig.OPTION_SURFACE_OCCUPANCY_PRIORITY;
39 import static androidx.camera.core.impl.UseCaseConfig.OPTION_TARGET_FRAME_RATE;
40 import static androidx.camera.core.impl.UseCaseConfig.OPTION_VIDEO_STABILIZATION_MODE;
41 import static androidx.camera.core.impl.UseCaseConfig.OPTION_ZSL_DISABLED;
42 import static androidx.camera.core.impl.utils.Threads.isMainThread;
43 import static androidx.camera.core.impl.utils.TransformUtils.rectToString;
44 import static androidx.camera.core.impl.utils.TransformUtils.within360;
45 import static androidx.camera.core.internal.TargetConfig.OPTION_TARGET_CLASS;
46 import static androidx.camera.core.internal.TargetConfig.OPTION_TARGET_NAME;
47 import static androidx.camera.core.internal.ThreadConfig.OPTION_BACKGROUND_EXECUTOR;
48 import static androidx.camera.core.internal.compat.quirk.SurfaceProcessingQuirk.workaroundBySurfaceProcessing;
49 import static androidx.camera.core.internal.utils.SizeUtil.getArea;
50 import static androidx.camera.video.QualitySelector.getQualityToResolutionMap;
51 import static androidx.camera.video.StreamInfo.STREAM_ID_ERROR;
52 import static androidx.camera.video.impl.VideoCaptureConfig.OPTION_FORCE_ENABLE_SURFACE_PROCESSING;
53 import static androidx.camera.video.impl.VideoCaptureConfig.OPTION_VIDEO_ENCODER_INFO_FINDER;
54 import static androidx.camera.video.impl.VideoCaptureConfig.OPTION_VIDEO_OUTPUT;
55 import static androidx.camera.video.internal.config.VideoConfigUtil.resolveVideoMimeInfo;
56 import static androidx.camera.video.internal.utils.DynamicRangeUtil.isHdrSettingsMatched;
57 import static androidx.camera.video.internal.utils.DynamicRangeUtil.videoProfileBitDepthToDynamicRangeBitDepth;
58 import static androidx.camera.video.internal.utils.DynamicRangeUtil.videoProfileHdrFormatsToDynamicRangeEncoding;
59 import static androidx.core.util.Preconditions.checkState;
60 
61 import static java.util.Collections.emptyMap;
62 import static java.util.Collections.singletonList;
63 import static java.util.Objects.requireNonNull;
64 
65 import android.annotation.SuppressLint;
66 import android.graphics.Rect;
67 import android.media.MediaCodec;
68 import android.os.SystemClock;
69 import android.util.Pair;
70 import android.util.Range;
71 import android.util.Size;
72 import android.view.Display;
73 import android.view.Surface;
74 
75 import androidx.annotation.MainThread;
76 import androidx.annotation.RestrictTo;
77 import androidx.annotation.RestrictTo.Scope;
78 import androidx.annotation.VisibleForTesting;
79 import androidx.camera.core.AspectRatio;
80 import androidx.camera.core.CameraInfo;
81 import androidx.camera.core.DynamicRange;
82 import androidx.camera.core.ImageCapture;
83 import androidx.camera.core.Logger;
84 import androidx.camera.core.MirrorMode;
85 import androidx.camera.core.Preview;
86 import androidx.camera.core.ResolutionInfo;
87 import androidx.camera.core.SurfaceRequest;
88 import androidx.camera.core.SurfaceRequest.TransformationInfo;
89 import androidx.camera.core.UseCase;
90 import androidx.camera.core.ViewPort;
91 import androidx.camera.core.impl.CameraCaptureCallback;
92 import androidx.camera.core.impl.CameraCaptureResult;
93 import androidx.camera.core.impl.CameraControlInternal;
94 import androidx.camera.core.impl.CameraInfoInternal;
95 import androidx.camera.core.impl.CameraInternal;
96 import androidx.camera.core.impl.CaptureConfig;
97 import androidx.camera.core.impl.Config;
98 import androidx.camera.core.impl.ConfigProvider;
99 import androidx.camera.core.impl.DeferrableSurface;
100 import androidx.camera.core.impl.EncoderProfilesProxy;
101 import androidx.camera.core.impl.ImageInputConfig;
102 import androidx.camera.core.impl.ImageOutputConfig;
103 import androidx.camera.core.impl.ImageOutputConfig.RotationValue;
104 import androidx.camera.core.impl.MutableConfig;
105 import androidx.camera.core.impl.MutableOptionsBundle;
106 import androidx.camera.core.impl.Observable;
107 import androidx.camera.core.impl.Observable.Observer;
108 import androidx.camera.core.impl.OptionsBundle;
109 import androidx.camera.core.impl.SessionConfig;
110 import androidx.camera.core.impl.StreamSpec;
111 import androidx.camera.core.impl.Timebase;
112 import androidx.camera.core.impl.UseCaseConfig;
113 import androidx.camera.core.impl.UseCaseConfigFactory;
114 import androidx.camera.core.impl.stabilization.StabilizationMode;
115 import androidx.camera.core.impl.utils.Threads;
116 import androidx.camera.core.impl.utils.TransformUtils;
117 import androidx.camera.core.impl.utils.executor.CameraXExecutors;
118 import androidx.camera.core.impl.utils.futures.FutureCallback;
119 import androidx.camera.core.impl.utils.futures.Futures;
120 import androidx.camera.core.internal.ThreadConfig;
121 import androidx.camera.core.processing.DefaultSurfaceProcessor;
122 import androidx.camera.core.processing.SurfaceEdge;
123 import androidx.camera.core.processing.SurfaceProcessorNode;
124 import androidx.camera.core.processing.util.OutConfig;
125 import androidx.camera.core.resolutionselector.ResolutionSelector;
126 import androidx.camera.video.StreamInfo.StreamState;
127 import androidx.camera.video.impl.VideoCaptureConfig;
128 import androidx.camera.video.internal.VideoValidatedEncoderProfilesProxy;
129 import androidx.camera.video.internal.compat.quirk.DeviceQuirks;
130 import androidx.camera.video.internal.compat.quirk.HdrRepeatingRequestFailureQuirk;
131 import androidx.camera.video.internal.compat.quirk.SizeCannotEncodeVideoQuirk;
132 import androidx.camera.video.internal.config.VideoMimeInfo;
133 import androidx.camera.video.internal.encoder.SwappedVideoEncoderInfo;
134 import androidx.camera.video.internal.encoder.VideoEncoderInfo;
135 import androidx.camera.video.internal.encoder.VideoEncoderInfoImpl;
136 import androidx.camera.video.internal.workaround.VideoEncoderInfoWrapper;
137 import androidx.concurrent.futures.CallbackToFutureAdapter;
138 import androidx.core.util.Preconditions;
139 
140 import com.google.common.util.concurrent.ListenableFuture;
141 
142 import org.jspecify.annotations.NonNull;
143 import org.jspecify.annotations.Nullable;
144 
145 import java.lang.reflect.Type;
146 import java.util.ArrayList;
147 import java.util.Collections;
148 import java.util.HashSet;
149 import java.util.Iterator;
150 import java.util.LinkedHashMap;
151 import java.util.List;
152 import java.util.Map;
153 import java.util.Objects;
154 import java.util.Set;
155 import java.util.UUID;
156 import java.util.concurrent.CancellationException;
157 import java.util.concurrent.ExecutionException;
158 import java.util.concurrent.Executor;
159 import java.util.concurrent.atomic.AtomicBoolean;
160 
161 /**
162  * A use case that provides a camera stream suitable for video applications.
163  *
164  * <p>VideoCapture is used to create a camera stream suitable for a video application such as
165  * recording a high-quality video to a file. The camera stream is consumed by an implementation
166  * of {@link VideoOutput}.
167  * {@link #withOutput(VideoOutput)} can be used to create a VideoCapture instance associated with
168  * the given VideoOutput. Take {@link Recorder} as an example,
169  * <pre>{@code
170  *         VideoCapture<Recorder> videoCapture
171  *                 = VideoCapture.withOutput(new Recorder.Builder().build());
172  * }</pre>
173  * Then {@link #getOutput()} can retrieve the Recorder instance.
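 *
 * <p>For example, a minimal sketch of retrieving the Recorder and starting a recording (assuming
 * the use case is bound, and that a {@code MediaStoreOutputOptions} named {@code outputOptions},
 * a {@code context} and an {@code executor} are available):
 * <pre>{@code
 *         Recording recording = videoCapture.getOutput()
 *                 .prepareRecording(context, outputOptions)
 *                 .start(executor, videoRecordEvent -> {});
 * }</pre>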
174  *
175  * @param <T> the type of VideoOutput
176  */
177 public final class VideoCapture<T extends VideoOutput> extends UseCase {
178     private static final String TAG = "VideoCapture";
179     private static final String SURFACE_UPDATE_KEY =
180             "androidx.camera.video.VideoCapture.streamUpdate";
181     private static final Defaults DEFAULT_CONFIG = new Defaults();
182 
183     @SuppressWarnings("WeakerAccess") // Synthetic access
184     DeferrableSurface mDeferrableSurface;
185     private @Nullable SurfaceEdge mCameraEdge;
186     @SuppressWarnings("WeakerAccess") // Synthetic access
187     StreamInfo mStreamInfo = StreamInfo.STREAM_INFO_ANY_INACTIVE;
188     @SuppressWarnings("WeakerAccess") // Synthetic access
189     SessionConfig.@NonNull Builder mSessionConfigBuilder = new SessionConfig.Builder();
190     @SuppressWarnings("WeakerAccess") // Synthetic access
191     ListenableFuture<Void> mSurfaceUpdateFuture = null;
192     private SurfaceRequest mSurfaceRequest;
193     @SuppressWarnings("WeakerAccess") // Synthetic access
194     VideoOutput.SourceState mSourceState = VideoOutput.SourceState.INACTIVE;
195     private @Nullable SurfaceProcessorNode mNode;
196     private @Nullable Rect mCropRect;
197     private int mRotationDegrees;
198     private boolean mHasCompensatingTransformation = false;
199     private @Nullable SourceStreamRequirementObserver mSourceStreamRequirementObserver;
200     private SessionConfig.@Nullable CloseableErrorListener mCloseableErrorListener;
201     private Map<Quality, List<Size>> mQualityToCustomSizesMap = emptyMap();
202 
203     /**
204      * Create a VideoCapture associated with the given {@link VideoOutput}.
205      *
206      * @throws NullPointerException if {@code videoOutput} is null.
207      */
208     public static <T extends VideoOutput> @NonNull VideoCapture<T> withOutput(
209             @NonNull T videoOutput) {
210         return new VideoCapture.Builder<>(Preconditions.checkNotNull(videoOutput)).build();
211     }
212 
213     /**
214      * Creates a new video capture use case from the given configuration.
215      *
216      * @param config for this use case instance
217      */
218     VideoCapture(@NonNull VideoCaptureConfig<T> config) {
219         super(config);
220     }
221 
222     /**
223      * Gets the {@link VideoOutput} associated with this VideoCapture.
224      *
225      * @return the value provided to {@link #withOutput(VideoOutput)} used to create this
226      * VideoCapture.
227      */
228     @SuppressWarnings("unchecked")
229     public @NonNull T getOutput() {
230         return ((VideoCaptureConfig<T>) getCurrentConfig()).getVideoOutput();
231     }
232 
233     /**
234      * Returns the desired rotation of the output video.
235      *
236      * <p>The rotation can be set prior to constructing a VideoCapture using
237      * {@link VideoCapture.Builder#setTargetRotation(int)} or dynamically by calling
238      * {@link VideoCapture#setTargetRotation(int)}.
239      * If not set, the target rotation defaults to the value of {@link Display#getRotation()} of
240      * the default display at the time the use case is bound.
241      *
242      * @return The rotation of the intended target.
243      * @see VideoCapture#setTargetRotation(int)
244      */
245     @RotationValue
246     public int getTargetRotation() {
247         return getTargetRotationInternal();
248     }
249 
250     /**
251      * Returns the target frame rate range, in frames per second, for the associated VideoCapture
252      * use case.
253      *
254      * <p>The target frame rate can be set prior to constructing a VideoCapture using
255      * {@link VideoCapture.Builder#setTargetFrameRate(Range)}.
256      * If not set, the target frame rate defaults to the value of
257      * {@link StreamSpec#FRAME_RATE_RANGE_UNSPECIFIED}.
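     *
     * <p>For example, a sketch of setting a target frame rate at construction time (assuming a
     * {@link Recorder} named {@code recorder}):
     * <pre>{@code
     * VideoCapture<Recorder> videoCapture = new VideoCapture.Builder<>(recorder)
     *         .setTargetFrameRate(new Range<>(30, 30))
     *         .build();
     * }</pre>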
258      *
259      * @return The target frame rate of the intended target.
260      */
261     public @NonNull Range<Integer> getTargetFrameRate() {
262         return getTargetFrameRateInternal();
263     }
264 
265     /**
266      * Returns whether video stabilization is enabled.
267      */
268     public boolean isVideoStabilizationEnabled() {
269         return getCurrentConfig().getVideoStabilizationMode() == StabilizationMode.ON;
270     }
271 
272     /**
273      * Sets the desired rotation of the output video.
274      *
275      * <p>Valid values include: {@link Surface#ROTATION_0}, {@link Surface#ROTATION_90},
276      * {@link Surface#ROTATION_180}, {@link Surface#ROTATION_270}.
277      * Rotation values are relative to the "natural" rotation, {@link Surface#ROTATION_0}.
278      *
279      * <p>While rotation can also be set via {@link Builder#setTargetRotation(int)}, using
280      * {@code setTargetRotation(int)} allows the target rotation to be set dynamically.
281      *
282      * <p>In general, it is best to use an {@link android.view.OrientationEventListener} to set
283      * the target rotation. This way, the rotation output will indicate which way is down for a
284      * given video. This is important since display orientation may be locked by device default,
285      * user setting, or app configuration, and some devices may not transition to a
286      * reverse-portrait display orientation. In these cases, set target rotation dynamically
287      * according to the {@link android.view.OrientationEventListener}, without re-creating the
288      * use case. {@link UseCase#snapToSurfaceRotation(int)} is a helper function to convert the
289      * orientation of the {@link android.view.OrientationEventListener} to a rotation value.
290      * See {@link UseCase#snapToSurfaceRotation(int)} for more information and sample code.
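     *
     * <p>A minimal sketch of updating the target rotation from an
     * {@link android.view.OrientationEventListener} (assuming a bound {@code videoCapture} and a
     * {@code context} are available):
     * <pre>{@code
     * OrientationEventListener orientationListener = new OrientationEventListener(context) {
     *     public void onOrientationChanged(int orientation) {
     *         if (orientation == OrientationEventListener.ORIENTATION_UNKNOWN) {
     *             return;
     *         }
     *         videoCapture.setTargetRotation(UseCase.snapToSurfaceRotation(orientation));
     *     }
     * };
     * orientationListener.enable();
     * }</pre>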
291      *
292      * <p>If not set, the target rotation will default to the value of
293      * {@link Display#getRotation()} of the default display at the time the use case is bound. To
294      * return to the default value, set the value to
295      * <pre>{@code
296      * context.getSystemService(WindowManager.class).getDefaultDisplay().getRotation();
297      * }</pre>
298      *
299      * <p>For a {@link Recorder} output, calling this method has no effect on the ongoing
300      * recording, but will affect recordings started after calling this method. The final
301      * rotation degrees of the video, including the degrees set by this method and the orientation
302      * of the camera sensor, will be reflected in one of several ways: 1) the rotation degrees are
303      * written into the video metadata, 2) the video content is rotated directly, or 3) both, i.e.
304      * rotation metadata plus rotated video content that combine to the target rotation. CameraX
305      * will choose a strategy according to the use case.
306      *
307      * @param rotation Desired rotation of the output video, expressed as one of
308      *                 {@link Surface#ROTATION_0}, {@link Surface#ROTATION_90},
309      *                 {@link Surface#ROTATION_180}, or {@link Surface#ROTATION_270}.
310      */
311     public void setTargetRotation(@RotationValue int rotation) {
312         if (setTargetRotationInternal(rotation)) {
313             sendTransformationInfoIfReady();
314         }
315     }
316 
317     /**
318      * Returns information about the selected resolution.
319      *
320      * <p>Note that the {@link ResolutionInfo#getResolution()} might not be the same as the
321      * resolution of the recorded video because the video might have been rotated according to
322      * the camera sensor orientation and the target rotation, and/or have been cropped according
323      * to the {@link androidx.camera.core.ViewPort} settings.
324      * The recorded video resolution can be determined by applying the
325      * {@link ResolutionInfo#getRotationDegrees()} to the size of
326      * {@link ResolutionInfo#getCropRect()}.
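     *
     * <p>For example, a sketch of deriving the recorded video size (assuming the use case is
     * bound and the returned {@code info} is non-null):
     * <pre>{@code
     * ResolutionInfo info = videoCapture.getResolutionInfo();
     * Rect cropRect = info.getCropRect();
     * int rotationDegrees = info.getRotationDegrees();
     * Size videoSize = rotationDegrees % 180 == 0
     *         ? new Size(cropRect.width(), cropRect.height())
     *         : new Size(cropRect.height(), cropRect.width());
     * }</pre>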
327      *
328      * <p>The resolution information may change if:
329      * <ul>
330      * <li>The use case is unbound and then rebound.
331      * <li>{@link #setTargetRotation(int)} is called to change the target rotation.
332      * </ul>
333      *
334      * <p>If changes occur, the application should call {@code getResolutionInfo()} again
335      * to get the latest {@link ResolutionInfo}.
336      *
337      * @return the resolution information if the use case is bound by the
338      * {@link androidx.camera.lifecycle.ProcessCameraProvider#bindToLifecycle} API, or {@code
339      * null} if the use case is not yet bound.
340      */
341     public @Nullable ResolutionInfo getResolutionInfo() {
342         return getResolutionInfoInternal();
343     }
344 
345     /**
346      * Returns the selected Quality.
347      *
348      * <p>The selected Quality represents the final quality level chosen for the stream. The
349      * selected Quality will be one of the specified qualities from the {@link QualitySelector}
350      * provided by the associated {@link VideoOutput}. If {@link Quality#HIGHEST} or
351      * {@link Quality#LOWEST} is specified in the selector, it will be resolved to an actual
352      * Quality value. Even if the stream is later cropped (e.g., by using a {@link ViewPort}), this
353      * value represents the original quality level of the stream.
354      *
355      * <p>This method will return the selected Quality only after the use case is bound using
356      * {@link androidx.camera.lifecycle.ProcessCameraProvider#bindToLifecycle}. Otherwise, it
357      * will return null. The selected Quality may change if the use case is unbound and then
358      * rebound.
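     *
     * <p>A minimal sketch (assuming a bound {@code videoCapture} whose {@link Recorder} was
     * configured with a {@link QualitySelector}):
     * <pre>{@code
     * Quality selectedQuality = videoCapture.getSelectedQuality();
     * if (selectedQuality != null) {
     *     // E.g. Quality.FHD when Quality.HIGHEST was requested and 1080p was chosen.
     * }
     * }</pre>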
359      *
360      * @return The selected Quality if the use case is bound, or null otherwise.
361      */
362     public @Nullable Quality getSelectedQuality() {
363         StreamSpec streamSpec = getAttachedStreamSpec();
364         if (streamSpec == null) {
365             return null;
366         }
367         // In the general case, there should be an exact match from configured resolution to
368         // Quality.
369         Size configuredResolution = streamSpec.getOriginalConfiguredResolution();
370         for (Map.Entry<Quality, List<Size>> entry : mQualityToCustomSizesMap.entrySet()) {
371             if (entry.getValue().contains(configuredResolution)) {
372                 return entry.getKey(); // Found exact match, no need to check further
373             }
374         }
375         Logger.w(TAG, "Can't find matched Quality for " + configuredResolution);
376 
377         // Fallback to find the nearest available quality. This can occur when StreamSharing
378         // is unable to downscale/crop the camera stream according to the UseCase's preferred
379         // resolution and instead returns the original camera stream resolution.
380         return findNearestSizeFor(mQualityToCustomSizesMap, configuredResolution);
381     }
382 
383     @RestrictTo(Scope.LIBRARY_GROUP)
384     @Override
385     protected @Nullable ResolutionInfo getResolutionInfoInternal() {
386         CameraInternal camera = getCamera();
387         Size resolution = getAttachedSurfaceResolution();
388         Rect cropRect = mCropRect;
389         int rotationDegrees = mRotationDegrees;
390 
391         if (camera == null || resolution == null || cropRect == null) {
392             return null;
393         }
394 
395         return new ResolutionInfo(resolution, cropRect, rotationDegrees);
396     }
397 
398     /**
399      * Returns the mirror mode.
400      *
401      * <p>The mirror mode is set by {@link VideoCapture.Builder#setMirrorMode(int)}. If not set,
402      * it defaults to {@link MirrorMode#MIRROR_MODE_OFF}.
403      *
404      * @return The mirror mode of the intended target.
405      */
406     @MirrorMode.Mirror
407     public int getMirrorMode() {
408         int mirrorMode = getMirrorModeInternal();
409         if (mirrorMode == MirrorMode.MIRROR_MODE_UNSPECIFIED) {
410             return MirrorMode.MIRROR_MODE_OFF;
411         }
412         return mirrorMode;
413     }
414 
415     @SuppressWarnings("unchecked")
416     @RestrictTo(Scope.LIBRARY_GROUP)
417     @Override
418     protected @NonNull StreamSpec onSuggestedStreamSpecUpdated(
419             @NonNull StreamSpec primaryStreamSpec,
420             @Nullable StreamSpec secondaryStreamSpec) {
421         Logger.d(TAG, "onSuggestedStreamSpecUpdated: " + primaryStreamSpec);
422         VideoCaptureConfig<T> config = (VideoCaptureConfig<T>) getCurrentConfig();
423         List<Size> customOrderedResolutions = config.getCustomOrderedResolutions(null);
424         if (customOrderedResolutions != null
425                 && !customOrderedResolutions.contains(primaryStreamSpec.getResolution())) {
426             Logger.w(TAG, "suggested resolution " + primaryStreamSpec.getResolution()
427                     + " is not in custom ordered resolutions " + customOrderedResolutions);
428         }
429         return primaryStreamSpec;
430     }
431 
432     /**
433      * Returns the dynamic range.
434      *
435      * <p>The dynamic range is set by {@link VideoCapture.Builder#setDynamicRange(DynamicRange)}.
436      * If the dynamic range set is not a fully defined dynamic range, such as
437      * {@link DynamicRange#HDR_UNSPECIFIED_10_BIT}, it will be returned exactly as provided and
438      * will not be resolved to a fully defined dynamic range.
439      *
440      * <p>If the dynamic range was not provided to
441      * {@link VideoCapture.Builder#setDynamicRange(DynamicRange)}, this will return the default of
442      * {@link DynamicRange#SDR}.
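     *
     * <p>For example, a sketch of requesting 10-bit HLG video at construction time (assuming a
     * {@link Recorder} named {@code recorder}):
     * <pre>{@code
     * VideoCapture<Recorder> videoCapture = new VideoCapture.Builder<>(recorder)
     *         .setDynamicRange(DynamicRange.HLG_10_BIT)
     *         .build();
     * }</pre>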
443      *
444      * @return the dynamic range set for this {@code VideoCapture} use case.
445      */
446     // Internal implementation note: this method should not be used to retrieve the dynamic range
447     // that will be sent to the VideoOutput. That should always be retrieved from the StreamSpec
448     // since that will be the final DynamicRange chosen by the camera based on other use case
449     // combinations.
450     public @NonNull DynamicRange getDynamicRange() {
451         return getCurrentConfig().hasDynamicRange() ? getCurrentConfig().getDynamicRange() :
452                 Defaults.DEFAULT_DYNAMIC_RANGE;
453     }
454 
455     /**
456      * {@inheritDoc}
457      */
458     @SuppressWarnings("unchecked")
459     @RestrictTo(Scope.LIBRARY_GROUP)
460     @Override
461     public void onStateAttached() {
462         super.onStateAttached();
463 
464         Logger.d(TAG, "VideoCapture#onStateAttached: cameraID = " + getCameraId());
465 
466         // For concurrent camera, the surface request might not be null when switching
467         // from single to dual camera.
468         if (getAttachedStreamSpec() == null || mSurfaceRequest != null) {
469             return;
470         }
471         StreamSpec attachedStreamSpec = Preconditions.checkNotNull(getAttachedStreamSpec());
472         mStreamInfo = fetchObservableValue(getOutput().getStreamInfo(),
473                 StreamInfo.STREAM_INFO_ANY_INACTIVE);
474         mSessionConfigBuilder = createPipeline(
475                 (VideoCaptureConfig<T>) getCurrentConfig(), attachedStreamSpec);
476         applyStreamInfoAndStreamSpecToSessionConfigBuilder(mSessionConfigBuilder, mStreamInfo,
477                 attachedStreamSpec);
478         updateSessionConfig(List.of(mSessionConfigBuilder.build()));
479         // VideoCapture has to be active to apply SessionConfig's template type.
480         notifyActive();
481         getOutput().getStreamInfo().addObserver(CameraXExecutors.mainThreadExecutor(),
482                 mStreamInfoObserver);
483         if (mSourceStreamRequirementObserver != null) {
484             // In case a previous observer was not closed, close it first
485             mSourceStreamRequirementObserver.close();
486         }
487         // Camera should be already bound by now, so calling getCameraControl() is ok
488         mSourceStreamRequirementObserver = new SourceStreamRequirementObserver(getCameraControl());
489         // Should automatically trigger once for latest data
490         getOutput().isSourceStreamRequired().addObserver(CameraXExecutors.mainThreadExecutor(),
491                 mSourceStreamRequirementObserver);
492         setSourceState(VideoOutput.SourceState.ACTIVE_NON_STREAMING);
493     }
494 
495     /**
496      * {@inheritDoc}
497      */
498     @Override
499     @RestrictTo(Scope.LIBRARY_GROUP)
500     public void setViewPortCropRect(@NonNull Rect viewPortCropRect) {
501         super.setViewPortCropRect(viewPortCropRect);
502         sendTransformationInfoIfReady();
503     }
504 
505     /**
506      * {@inheritDoc}
507      */
508     @RestrictTo(Scope.LIBRARY_GROUP)
509     @Override
510     public void onStateDetached() {
511         Logger.d(TAG, "VideoCapture#onStateDetached");
512 
513         checkState(isMainThread(), "VideoCapture can only be detached on the main thread.");
514 
515         // It's safer to remove and close mSourceStreamRequirementObserver before stopping recorder
516         // in case there is some bug leading to double video usage decrement updates (e.g. once for
517         // recorder stop and once for observer close)
518         if (mSourceStreamRequirementObserver != null) {
519             getOutput().isSourceStreamRequired().removeObserver(mSourceStreamRequirementObserver);
520             mSourceStreamRequirementObserver.close();
521             mSourceStreamRequirementObserver = null;
522         }
523 
524         setSourceState(VideoOutput.SourceState.INACTIVE);
525         getOutput().getStreamInfo().removeObserver(mStreamInfoObserver);
526 
527         if (mSurfaceUpdateFuture != null) {
528             if (mSurfaceUpdateFuture.cancel(false)) {
529                 Logger.d(TAG, "VideoCapture is detached from the camera. Surface update "
530                         + "cancelled.");
531             }
532         }
533         // Clear the pipeline to close the surface, which releases the codec so that it's
534         // available for other applications.
535         clearPipeline();
536     }
537 
538     /**
539      * {@inheritDoc}
540      */
541     @Override
542     @RestrictTo(Scope.LIBRARY_GROUP)
543     protected @NonNull StreamSpec onSuggestedStreamSpecImplementationOptionsUpdated(
544             @NonNull Config config) {
545         mSessionConfigBuilder.addImplementationOptions(config);
546         updateSessionConfig(List.of(mSessionConfigBuilder.build()));
547         return requireNonNull(getAttachedStreamSpec()).toBuilder()
548                 .setImplementationOptions(config).build();
549     }
550 
551     @Override
552     public @NonNull String toString() {
553         return TAG + ":" + getName();
554     }
555 
556     /**
557      * {@inheritDoc}
558      */
559     @RestrictTo(Scope.LIBRARY_GROUP)
560     @Override
561     public @Nullable UseCaseConfig<?> getDefaultConfig(boolean applyDefaultConfig,
562             @NonNull UseCaseConfigFactory factory) {
563         Config captureConfig = factory.getConfig(
564                 DEFAULT_CONFIG.getConfig().getCaptureType(),
565                 ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY);
566 
567         if (applyDefaultConfig) {
568             captureConfig = Config.mergeConfigs(captureConfig, DEFAULT_CONFIG.getConfig());
569         }
570 
571         return captureConfig == null ? null :
572                 getUseCaseConfigBuilder(captureConfig).getUseCaseConfig();
573     }
574 
575     /**
576      * {@inheritDoc}
577      */
578     @RestrictTo(Scope.LIBRARY_GROUP)
579     @Override
580     protected @NonNull UseCaseConfig<?> onMergeConfig(@NonNull CameraInfoInternal cameraInfo,
581             UseCaseConfig.@NonNull Builder<?, ?, ?> builder) {
582 
583         updateCustomOrderedResolutionsByQuality(cameraInfo, builder);
584 
585         return builder.getUseCaseConfig();
586     }
587 
588     /**
589      * {@inheritDoc}
590      */
591     @RestrictTo(Scope.LIBRARY_GROUP)
592     @Override
593     public UseCaseConfig.@NonNull Builder<?, ?, ?> getUseCaseConfigBuilder(@NonNull Config config) {
594         return Builder.fromConfig(config);
595     }
596 
597     private void sendTransformationInfoIfReady() {
598         CameraInternal cameraInternal = getCamera();
599         SurfaceEdge cameraEdge = mCameraEdge;
600         if (cameraInternal != null && cameraEdge != null) {
601             mRotationDegrees = getCompensatedRotation(cameraInternal);
602             cameraEdge.updateTransformation(mRotationDegrees, getAppTargetRotation());
603         }
604     }
605 
606     private @NonNull Rect adjustCropRectWithInProgressTransformation(@NonNull Rect cropRect,
607             int rotationDegrees) {
608         Rect adjustedCropRect = cropRect;
609         if (shouldCompensateTransformation()) {
610             adjustedCropRect = TransformUtils.sizeToRect(TransformUtils.getRotatedSize(
611                     Preconditions.checkNotNull(
612                             mStreamInfo.getInProgressTransformationInfo()).getCropRect(),
613                     rotationDegrees));
614         }
615         return adjustedCropRect;
616     }
617 
618     /**
619      * Gets the rotation that is compensated by the in-progress transformation.
620      *
621      * <p>If there's no in-progress recording, the returned rotation degrees will be the same as
622      * {@link #getRelativeRotation(CameraInternal)}.
623      */
624     private int getCompensatedRotation(@NonNull CameraInternal cameraInternal) {
625         boolean isMirroringRequired = isMirroringRequired(cameraInternal);
626         int rotationDegrees = getRelativeRotation(cameraInternal, isMirroringRequired);
627         if (shouldCompensateTransformation()) {
628             TransformationInfo transformationInfo =
629                     requireNonNull(mStreamInfo.getInProgressTransformationInfo());
630             int inProgressDegrees = transformationInfo.getRotationDegrees();
631             if (isMirroringRequired != transformationInfo.isMirroring()) {
632                 // If the mirroring states of the current stream and the existing stream are
633                 // different, the existing rotation degrees should be inverted.
634                 inProgressDegrees = -inProgressDegrees;
635             }
636             rotationDegrees = within360(rotationDegrees - inProgressDegrees);
637         }
638         return rotationDegrees;
639     }
640 
641     private @NonNull Size adjustResolutionWithInProgressTransformation(@NonNull Size resolution,
642             @NonNull Rect originalCropRect, @NonNull Rect targetCropRect) {
643         Size nodeResolution = resolution;
644         if (shouldCompensateTransformation() && !targetCropRect.equals(originalCropRect)) {
645             float targetRatio = ((float) targetCropRect.height()) / originalCropRect.height();
646             nodeResolution = new Size((int) Math.ceil(resolution.getWidth() * targetRatio),
647                     (int) Math.ceil(resolution.getHeight() * targetRatio));
648         }
649         return nodeResolution;
650     }
651 
652     @VisibleForTesting
653     @Nullable Rect getCropRect() {
654         return mCropRect;
655     }
656 
657     @VisibleForTesting
658     int getRotationDegrees() {
659         return mRotationDegrees;
660     }
661 
662     /**
663      * Calculates the crop rect.
664      *
665      * <p>Fall back to the full {@link Surface} rect if {@link ViewPort} crop rect is not
666      * available. The returned crop rect is adjusted if it is not valid to the video encoder.
667      */
668     private @NonNull Rect calculateCropRect(@NonNull Size surfaceResolution,
669             @Nullable VideoEncoderInfo videoEncoderInfo) {
670         Rect cropRect;
671         if (getViewPortCropRect() != null) {
672             cropRect = getViewPortCropRect();
673         } else {
674             cropRect = new Rect(0, 0, surfaceResolution.getWidth(), surfaceResolution.getHeight());
675         }
676         if (videoEncoderInfo == null || videoEncoderInfo.isSizeSupportedAllowSwapping(
677                 cropRect.width(), cropRect.height())) {
678             return cropRect;
679         }
680         return adjustCropRectToValidSize(cropRect, surfaceResolution, videoEncoderInfo);
681     }
682 
683     @SuppressLint("WrongConstant")
684     @MainThread
685     private SessionConfig.@NonNull Builder createPipeline(
686             @NonNull VideoCaptureConfig<T> config,
687             @NonNull StreamSpec streamSpec) {
688         Threads.checkMainThread();
689         CameraInternal camera = Preconditions.checkNotNull(getCamera());
690         Size resolution = streamSpec.getResolution();
691 
692         // Currently, VideoCapture uses StreamInfo to handle requests for the surface, so
693         // handleInvalidate() is not used. But if a different approach is needed in the future,
694         // handleInvalidate() can be used as an alternative.
695         Runnable onSurfaceInvalidated = this::notifyReset;
696         Range<Integer> expectedFrameRate = resolveFrameRate(streamSpec);
697         MediaSpec mediaSpec = requireNonNull(getMediaSpec());
698         VideoCapabilities videoCapabilities = getVideoCapabilities(camera.getCameraInfo(),
699                 streamSpec.getSessionType());
700         DynamicRange dynamicRange = streamSpec.getDynamicRange();
701         VideoValidatedEncoderProfilesProxy encoderProfiles =
702                 videoCapabilities.findNearestHigherSupportedEncoderProfilesFor(resolution,
703                         dynamicRange);
704         VideoEncoderInfo videoEncoderInfo = resolveVideoEncoderInfo(
705                 config.getVideoEncoderInfoFinder(), encoderProfiles, mediaSpec, dynamicRange);
706         mRotationDegrees = getCompensatedRotation(camera);
707         Rect originalCropRect = calculateCropRect(resolution, videoEncoderInfo);
708         mCropRect = adjustCropRectWithInProgressTransformation(originalCropRect, mRotationDegrees);
709         Size nodeResolution = adjustResolutionWithInProgressTransformation(resolution,
710                 originalCropRect, mCropRect);
711         if (shouldCompensateTransformation()) {
712             // If this pipeline is created with in-progress transformation, we need to reset the
713             // pipeline when the transformation becomes invalid.
714             mHasCompensatingTransformation = true;
715         }
716         mCropRect = adjustCropRectByQuirk(
717                 mCropRect,
718                 mRotationDegrees,
719                 isCreateNodeNeeded(camera, config, mCropRect, resolution, dynamicRange),
720                 videoEncoderInfo
721         );
722         mNode = createNodeIfNeeded(camera, config, mCropRect, resolution, dynamicRange);
723         boolean hasGlProcessing = !camera.getHasTransform() || mNode != null;
724         Timebase timebase = resolveTimebase(camera, mNode);
725         Logger.d(TAG, "camera timebase = " + camera.getCameraInfoInternal().getTimebase()
726                 + ", processing timebase = " + timebase);
727         // Update the StreamSpec with new frame rate range and resolution.
728         StreamSpec updatedStreamSpec =
729                 streamSpec.toBuilder()
730                         .setResolution(nodeResolution)
731                         .setExpectedFrameRateRange(expectedFrameRate)
732                         .build();
733         // Make sure the previously created camera edge is cleared before creating a new one.
734         checkState(mCameraEdge == null);
735         mCameraEdge = new SurfaceEdge(
736                 VIDEO_CAPTURE,
737                 INTERNAL_DEFINED_IMAGE_FORMAT_PRIVATE,
738                 updatedStreamSpec,
739                 getSensorToBufferTransformMatrix(),
740                 camera.getHasTransform(),
741                 mCropRect,
742                 mRotationDegrees,
743                 getAppTargetRotation(),
744                 shouldMirror(camera));
745         mCameraEdge.addOnInvalidatedListener(onSurfaceInvalidated);
746         if (mNode != null) {
747             OutConfig outConfig = OutConfig.of(mCameraEdge);
748             SurfaceProcessorNode.In nodeInput = SurfaceProcessorNode.In.of(
749                     mCameraEdge,
750                     singletonList(outConfig));
751             SurfaceProcessorNode.Out nodeOutput = mNode.transform(nodeInput);
752             SurfaceEdge appEdge = requireNonNull(nodeOutput.get(outConfig));
753             appEdge.addOnInvalidatedListener(
754                     () -> onAppEdgeInvalidated(appEdge, camera, config, timebase, hasGlProcessing));
755             mSurfaceRequest = appEdge.createSurfaceRequest(camera);
756             mDeferrableSurface = mCameraEdge.getDeferrableSurface();
757             DeferrableSurface latestDeferrableSurface = mDeferrableSurface;
758             mDeferrableSurface.getTerminationFuture().addListener(() -> {
759                 // If the camera surface is still the latest one, this pipeline can be abandoned.
760                 // Clear the pipeline to propagate the surface completion event to the app surface.
761                 if (latestDeferrableSurface == mDeferrableSurface) {
762                     clearPipeline();
763                 }
764             }, CameraXExecutors.mainThreadExecutor());
765         } else {
766             mSurfaceRequest = mCameraEdge.createSurfaceRequest(camera);
767             mDeferrableSurface = mSurfaceRequest.getDeferrableSurface();
768         }
769 
770         config.getVideoOutput().onSurfaceRequested(mSurfaceRequest, timebase, hasGlProcessing);
771         sendTransformationInfoIfReady();
772         // Since VideoCapture is in the video module and can't be recognized by the core module,
773         // use the MediaCodec class as the surface's container class instead.
774         mDeferrableSurface.setContainerClass(MediaCodec.class);
775 
776         SessionConfig.Builder sessionConfigBuilder = SessionConfig.Builder.createFrom(config,
777                 streamSpec.getResolution());
778         sessionConfigBuilder.setSessionType(streamSpec.getSessionType());
779         // Use the frame rate range directly from the StreamSpec here (don't resolve it to the
780         // default if unresolved).
781         // Applies the AE fps range to the session config builder according to the stream spec and
782         // quirk values.
783         applyExpectedFrameRateRange(sessionConfigBuilder, streamSpec);
784         sessionConfigBuilder.setVideoStabilization(config.getVideoStabilizationMode());
785         if (mCloseableErrorListener != null) {
786             mCloseableErrorListener.close();
787         }
788         mCloseableErrorListener = new SessionConfig.CloseableErrorListener(
789                 (sessionConfig, error) -> resetPipeline());
790         sessionConfigBuilder.setErrorListener(mCloseableErrorListener);
791         if (streamSpec.getImplementationOptions() != null) {
792             sessionConfigBuilder.addImplementationOptions(streamSpec.getImplementationOptions());
793         }
794 
795         return sessionConfigBuilder;
796     }
797 
798     private void onAppEdgeInvalidated(@NonNull SurfaceEdge appEdge, @NonNull CameraInternal camera,
799             @NonNull VideoCaptureConfig<T> config, @NonNull Timebase timebase,
800             boolean hasGlProcessing) {
801         if (camera == getCamera()) {
802             mSurfaceRequest = appEdge.createSurfaceRequest(camera);
803             config.getVideoOutput().onSurfaceRequested(mSurfaceRequest, timebase, hasGlProcessing);
804             sendTransformationInfoIfReady();
805         }
806     }
807 
808     /**
809      * Clear the internal pipeline so that the pipeline can be set up again.
810      */
811     @MainThread
812     private void clearPipeline() {
813         Threads.checkMainThread();
814 
815         // Closes the old error listener
816         if (mCloseableErrorListener != null) {
817             mCloseableErrorListener.close();
818             mCloseableErrorListener = null;
819         }
820 
821         if (mDeferrableSurface != null) {
822             mDeferrableSurface.close();
823             mDeferrableSurface = null;
824         }
825         if (mNode != null) {
826             mNode.release();
827             mNode = null;
828         }
829         if (mCameraEdge != null) {
830             mCameraEdge.close();
831             mCameraEdge = null;
832         }
833         mCropRect = null;
834         mSurfaceRequest = null;
835         mStreamInfo = StreamInfo.STREAM_INFO_ANY_INACTIVE;
836         mRotationDegrees = 0;
837         mHasCompensatingTransformation = false;
838     }
839 
840     @MainThread
841     @SuppressWarnings({"WeakerAccess", "unchecked"}) /* synthetic accessor */
842     void resetPipeline() {
843         // Do nothing when the use case has been unbound.
844         if (getCamera() == null) {
845             return;
846         }
847 
848         clearPipeline();
849         mSessionConfigBuilder = createPipeline(
850                 (VideoCaptureConfig<T>) getCurrentConfig(),
851                 Preconditions.checkNotNull(getAttachedStreamSpec()));
852         applyStreamInfoAndStreamSpecToSessionConfigBuilder(mSessionConfigBuilder, mStreamInfo,
853                 getAttachedStreamSpec());
854         updateSessionConfig(List.of(mSessionConfigBuilder.build()));
855         notifyReset();
856     }
857 
858     /**
859      * Returns the camera-side {@link SurfaceEdge}, for testing.
860      */
861     @VisibleForTesting
862     @Nullable SurfaceEdge getCameraEdge() {
863         return mCameraEdge;
864     }
865 
866     /**
867      * Provides a base static default configuration for the VideoCapture.
868      *
869      * <p>These values may be overridden by the implementation. They only provide a minimum set of
870      * defaults that are implementation independent.
871      */
872     @RestrictTo(Scope.LIBRARY_GROUP)
873     public static final class Defaults implements ConfigProvider<VideoCaptureConfig<?>> {
874         /** Surface occupancy priority for this use case. */
875         private static final int DEFAULT_SURFACE_OCCUPANCY_PRIORITY = 5;
876         private static final VideoOutput DEFAULT_VIDEO_OUTPUT =
877                 SurfaceRequest::willNotProvideSurface;
878         private static final VideoCaptureConfig<?> DEFAULT_CONFIG;
879 
880         private static final VideoEncoderInfo.Finder
881                 DEFAULT_VIDEO_ENCODER_INFO_FINDER = VideoEncoderInfoImpl.FINDER;
882 
883         static final Range<Integer> DEFAULT_FPS_RANGE = new Range<>(30, 30);
884 
885         /**
886          * Explicitly setting the default dynamic range to SDR (rather than UNSPECIFIED) means
887          * VideoCapture won't inherit dynamic ranges from other use cases.
888          */
889         static final DynamicRange DEFAULT_DYNAMIC_RANGE = DynamicRange.SDR;
890 
891         static {
892             Builder<?> builder = new Builder<>(DEFAULT_VIDEO_OUTPUT)
893                     .setSurfaceOccupancyPriority(DEFAULT_SURFACE_OCCUPANCY_PRIORITY)
894                     .setVideoEncoderInfoFinder(DEFAULT_VIDEO_ENCODER_INFO_FINDER)
895                     .setDynamicRange(DEFAULT_DYNAMIC_RANGE);
896 
897             DEFAULT_CONFIG = builder.getUseCaseConfig();
898         }
899 
900         @Override
901         public @NonNull VideoCaptureConfig<?> getConfig() {
902             return DEFAULT_CONFIG;
903         }
904     }
905 
906     private @Nullable MediaSpec getMediaSpec() {
907         return fetchObservableValue(getOutput().getMediaSpec(), null);
908     }
909 
910     private @NonNull VideoCapabilities getVideoCapabilities(@NonNull CameraInfo cameraInfo,
911             int sessionType) {
912         return getOutput().getMediaCapabilities(cameraInfo, sessionType);
913     }
914 
915     private final Observer<StreamInfo> mStreamInfoObserver = new Observer<StreamInfo>() {
916         @SuppressWarnings("unchecked")
917         @Override
918         public void onNewData(@Nullable StreamInfo streamInfo) {
919             if (streamInfo == null) {
920                 throw new IllegalArgumentException("StreamInfo can't be null");
921             }
922             if (mSourceState == VideoOutput.SourceState.INACTIVE) {
923                 // VideoCapture is unbound.
924                 return;
925             }
926             Logger.d(TAG, "Stream info update: old: " + mStreamInfo + " new: " + streamInfo);
927 
928             StreamInfo currentStreamInfo = mStreamInfo;
929             mStreamInfo = streamInfo;
930 
931             // resetPipeline() includes notifyReset()/notifyUpdated(), and notifyReset() includes
932             // notifyUpdated(), so only the highest-order action is taken as an optimization.
934             StreamSpec attachedStreamSpec = Preconditions.checkNotNull(getAttachedStreamSpec());
935             if (isStreamIdChanged(currentStreamInfo.getId(), streamInfo.getId())
936                     || shouldResetCompensatingTransformation(currentStreamInfo, streamInfo)) {
937                 // Reset pipeline if it's one of the following cases:
938                 // 1. The stream ids are different, which means there's a new surface ready to be
939                 // requested.
940                 // 2. The in-progress transformation info becomes null, which means a recording
941                 // has been finalized, and there's an existing compensating transformation.
942                 resetPipeline();
943             } else if ((currentStreamInfo.getId() != STREAM_ID_ERROR
944                     && streamInfo.getId() == STREAM_ID_ERROR)
945                     || (currentStreamInfo.getId() == STREAM_ID_ERROR
946                     && streamInfo.getId() != STREAM_ID_ERROR)) {
947                 // If the id switches to STREAM_ID_ERROR, the VideoOutput failed to set up the
948                 // video stream and the surface should be removed from the camera, and vice versa.
949                 applyStreamInfoAndStreamSpecToSessionConfigBuilder(mSessionConfigBuilder,
950                         streamInfo,
951                         attachedStreamSpec);
952                 updateSessionConfig(List.of(mSessionConfigBuilder.build()));
953                 notifyReset();
954             } else if (currentStreamInfo.getStreamState() != streamInfo.getStreamState()) {
955                 applyStreamInfoAndStreamSpecToSessionConfigBuilder(mSessionConfigBuilder,
956                         streamInfo,
957                         attachedStreamSpec);
958                 updateSessionConfig(List.of(mSessionConfigBuilder.build()));
959                 notifyUpdated();
960             }
961         }
962 
963         @Override
964         public void onError(@NonNull Throwable t) {
965             Logger.w(TAG, "Receive onError from StreamState observer", t);
966         }
967     };
968 
969     /**
970      * Observes whether the source stream is required and updates the source (i.e. the camera
971      * layer) accordingly.
972      */
973     static class SourceStreamRequirementObserver implements Observer<Boolean> {
974         private @Nullable CameraControlInternal mCameraControl;
975 
976         private boolean mIsSourceStreamRequired = false;
977 
978         SourceStreamRequirementObserver(@NonNull CameraControlInternal cameraControl) {
979             mCameraControl = cameraControl;
980         }
981 
982         @MainThread
983         @Override
984         public void onNewData(@Nullable Boolean value) {
985             checkState(isMainThread(),
986                     "SourceStreamRequirementObserver can be updated from main thread only");
987             updateVideoUsageInCamera(Boolean.TRUE.equals(value));
988         }
989 
990         @Override
991         public void onError(@NonNull Throwable t) {
992             Logger.w(TAG, "SourceStreamRequirementObserver#onError", t);
993         }
994 
995         private void updateVideoUsageInCamera(boolean isRequired) {
996             if (mIsSourceStreamRequired == isRequired) {
997                 return;
998             }
999             mIsSourceStreamRequired = isRequired;
1000             if (mCameraControl != null) {
1001                 if (mIsSourceStreamRequired) {
1002                     mCameraControl.incrementVideoUsage();
1003                 } else {
1004                     mCameraControl.decrementVideoUsage();
1005                 }
1006             } else {
1007                 Logger.d(TAG,
1008                         "SourceStreamRequirementObserver#isSourceStreamRequired: Received"
1009                                 + " new data despite being closed already");
1010             }
1011         }
1012 
1013         /**
1014          * Closes this object to detach the association with camera and updates recording status if
1015          * required.
1016          */
1017         @MainThread
1018         public void close() {
1019             checkState(isMainThread(),
1020                     "SourceStreamRequirementObserver can be closed from main thread only");
1021 
1022             Logger.d(TAG, "SourceStreamRequirementObserver#close: mIsSourceStreamRequired = "
1023                     + mIsSourceStreamRequired);
1024 
1025             if (mCameraControl == null) {
1026                 Logger.d(TAG, "SourceStreamRequirementObserver#close: Already closed!");
1027                 return;
1028             }
1029 
1030             // Before removing the camera, it should be updated about recording status
1031             updateVideoUsageInCamera(false);
1032             mCameraControl = null;
1033         }
1034     }
1035 
1036     @MainThread
1037     @SuppressWarnings("WeakerAccess") /* synthetic accessor */
1038     void applyStreamInfoAndStreamSpecToSessionConfigBuilder(
1039             SessionConfig.@NonNull Builder sessionConfigBuilder,
1040             @NonNull StreamInfo streamInfo, @NonNull StreamSpec streamSpec) {
1041         final boolean isStreamError = streamInfo.getId() == StreamInfo.STREAM_ID_ERROR;
1042         final boolean isStreamActive = streamInfo.getStreamState() == StreamState.ACTIVE;
1043         if (isStreamError && isStreamActive) {
1044             throw new IllegalStateException(
1045                     "Unexpected stream state, stream is error but active");
1046         }
1047 
1048         sessionConfigBuilder.clearSurfaces();
1049         DynamicRange dynamicRange = streamSpec.getDynamicRange();
1050         if (!isStreamError && mDeferrableSurface != null) {
1051             if (isStreamActive) {
1052                 sessionConfigBuilder.addSurface(mDeferrableSurface,
1053                         dynamicRange,
1054                         null,
1055                         MirrorMode.MIRROR_MODE_UNSPECIFIED);
1056             } else {
1057                 sessionConfigBuilder.addNonRepeatingSurface(mDeferrableSurface, dynamicRange);
1058             }
1059         } // Don't attach surface when stream is invalid.
1060 
1061         setupSurfaceUpdateNotifier(sessionConfigBuilder, isStreamActive);
1062     }
1063 
1064     private boolean isCreateNodeNeeded(@NonNull CameraInternal camera,
1065             @NonNull VideoCaptureConfig<?> config,
1066             @NonNull Rect cropRect,
1067             @NonNull Size resolution,
1068             @NonNull DynamicRange dynamicRange
1069     ) {
1070         return getEffect() != null
1071                 || shouldEnableSurfaceProcessingByConfig(camera, config)
1072                 || shouldEnableSurfaceProcessingByQuirk(camera)
1073                 || shouldEnableSurfaceProcessingBasedOnDynamicRangeByQuirk(camera, dynamicRange)
1074                 || shouldCrop(cropRect, resolution)
1075                 || shouldMirror(camera)
1076                 || shouldCompensateTransformation();
1077     }
1078 
1079     private @Nullable SurfaceProcessorNode createNodeIfNeeded(@NonNull CameraInternal camera,
1080             @NonNull VideoCaptureConfig<T> config,
1081             @NonNull Rect cropRect,
1082             @NonNull Size resolution,
1083             @NonNull DynamicRange dynamicRange) {
1084         if (isCreateNodeNeeded(camera, config, cropRect, resolution, dynamicRange)) {
1085             Logger.d(TAG, "Surface processing is enabled.");
1086             return new SurfaceProcessorNode(requireNonNull(getCamera()),
1087                     getEffect() != null ? getEffect().createSurfaceProcessorInternal() :
1088                             DefaultSurfaceProcessor.Factory.newInstance(dynamicRange));
1089         }
1090         return null;
1091     }
1092 
1093     @VisibleForTesting
1094     @Nullable SurfaceProcessorNode getNode() {
1095         return mNode;
1096     }
1097 
1098     /** Adjusts the cropRect if the quirk matches, otherwise returns the original cropRect. */
1099     private static @NonNull Rect adjustCropRectByQuirk(@NonNull Rect cropRect, int rotationDegrees,
1100             boolean isSurfaceProcessingEnabled, @Nullable VideoEncoderInfo videoEncoderInfo) {
1101         SizeCannotEncodeVideoQuirk quirk = DeviceQuirks.get(SizeCannotEncodeVideoQuirk.class);
1102         if (quirk != null) {
1103             return quirk.adjustCropRectForProblematicEncodeSize(cropRect,
1104                     isSurfaceProcessingEnabled ? rotationDegrees : 0, videoEncoderInfo);
1105         }
1106         return cropRect;
1107     }
1108 
1109     /**
1110      * This method resizes the crop rectangle to a valid size.
1111      *
1112      * <p>The valid size must
1113      * <ul>
1114      * <li>be a multiple of VideoEncoderInfo.getWidthAlignment()/getHeightAlignment(), and</li>
1115      * <li>fall within the Surface resolution and VideoEncoderInfo.getSupportedWidths()
1116      * /getSupportedHeights().</li>
1117      * </ul>
1118      *
1119      * <p>When the size is not a multiple of the alignment, it seeks to shrink or enlarge the size
1120      * with the smallest amount of change and ensures that the size is within the surface
1121      * resolution and supported widths and heights. The new cropping rectangle position (left,
1122      * right, top, and bottom) is then calculated by extending or indenting from the center of
1123      * the original cropping rectangle.
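     *
     * <p>For example (hypothetical numbers): a 1021x767 crop with a width/height alignment of 4
     * aligns to the candidates 1020/1024 x 764/768. The candidate with the smallest total change
     * that still fits within the surface resolution, 1020x768, is chosen, and the new rectangle
     * is re-centered on the original crop's center.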
1124      */
1125     @SuppressWarnings("RedundantIfStatement")
1126     private static @NonNull Rect adjustCropRectToValidSize(@NonNull Rect cropRect,
1127             @NonNull Size resolution, @NonNull VideoEncoderInfo videoEncoderInfo) {
1128         Logger.d(TAG, String.format("Adjust cropRect %s by width/height alignment %d/%d and "
1129                         + "supported widths %s / supported heights %s",
1130                 rectToString(cropRect),
1131                 videoEncoderInfo.getWidthAlignment(),
1132                 videoEncoderInfo.getHeightAlignment(),
1133                 videoEncoderInfo.getSupportedWidths(),
1134                 videoEncoderInfo.getSupportedHeights()
1135         ));
1136 
1137         boolean swapWidthHeightConstraints;
1138         if (videoEncoderInfo.getSupportedWidths().contains(cropRect.width())
1139                 && videoEncoderInfo.getSupportedHeights().contains(cropRect.height())) {
1140             swapWidthHeightConstraints = false;
1141         } else if (videoEncoderInfo.canSwapWidthHeight()
1142                 && videoEncoderInfo.getSupportedHeights().contains(cropRect.width())
1143                 && videoEncoderInfo.getSupportedWidths().contains(cropRect.height())) {
1144             swapWidthHeightConstraints = true;
1145         } else {
1146             // We may need a strategy when both width and height are not within the supported
1147             // widths and heights. It should be a rare case, so for now we leave it without swapping.
1148             swapWidthHeightConstraints = false;
1149         }
1150         if (swapWidthHeightConstraints) {
1151             videoEncoderInfo = new SwappedVideoEncoderInfo(videoEncoderInfo);
1152         }
1153 
1154         int widthAlignment = videoEncoderInfo.getWidthAlignment();
1155         int heightAlignment = videoEncoderInfo.getHeightAlignment();
1156         Range<Integer> supportedWidths = videoEncoderInfo.getSupportedWidths();
1157         Range<Integer> supportedHeights = videoEncoderInfo.getSupportedHeights();
1158 
1159         // Construct all up/down alignment combinations.
1160         int widthAlignedDown = alignDown(cropRect.width(), widthAlignment, supportedWidths);
1161         int widthAlignedUp = alignUp(cropRect.width(), widthAlignment, supportedWidths);
1162         int heightAlignedDown = alignDown(cropRect.height(), heightAlignment, supportedHeights);
1163         int heightAlignedUp = alignUp(cropRect.height(), heightAlignment, supportedHeights);
1164 
1165         // Use Set to filter out duplicates.
1166         Set<Size> candidateSet = new HashSet<>();
1167         addBySupportedSize(candidateSet, widthAlignedDown, heightAlignedDown, resolution,
1168                 videoEncoderInfo);
1169         addBySupportedSize(candidateSet, widthAlignedDown, heightAlignedUp, resolution,
1170                 videoEncoderInfo);
1171         addBySupportedSize(candidateSet, widthAlignedUp, heightAlignedDown, resolution,
1172                 videoEncoderInfo);
1173         addBySupportedSize(candidateSet, widthAlignedUp, heightAlignedUp, resolution,
1174                 videoEncoderInfo);
1175         if (candidateSet.isEmpty()) {
1176             Logger.w(TAG, "Can't find valid cropped size");
1177             return cropRect;
1178         }
1179         List<Size> candidatesList = new ArrayList<>(candidateSet);
1180         Logger.d(TAG, "candidatesList = " + candidatesList);
1181 
1182         // Find the smallest change in dimensions.
1183         //noinspection ComparatorCombinators - Suggestion by Comparator.comparingInt is for API24+
1184         Collections.sort(candidatesList,
1185                 (s1, s2) -> (Math.abs(s1.getWidth() - cropRect.width()) + Math.abs(
1186                         s1.getHeight() - cropRect.height()))
1187                         - (Math.abs(s2.getWidth() - cropRect.width()) + Math.abs(
1188                         s2.getHeight() - cropRect.height())));
1189         Logger.d(TAG, "sorted candidatesList = " + candidatesList);
1190         Size newSize = candidatesList.get(0);
1191         int newWidth = newSize.getWidth();
1192         int newHeight = newSize.getHeight();
1193 
1194         if (newWidth == cropRect.width() && newHeight == cropRect.height()) {
1195             Logger.d(TAG, "No need to adjust cropRect because crop size is valid.");
1196             return cropRect;
1197         }
1198 
1199         // New width/height should be multiple of 2 since VideoCapabilities.get*Alignment()
1200         // returns power of 2. This ensures width/2 and height/2 are not rounded off.
1201         // New width/height smaller than resolution ensures calculated cropRect never exceeds
1202         // the resolution.
1203         checkState(newWidth % 2 == 0 && newHeight % 2 == 0
1204                 && newWidth <= resolution.getWidth() && newHeight <= resolution.getHeight());
1205         Rect newCropRect = new Rect(cropRect);
1206         if (newWidth != cropRect.width()) {
1207             // Note: When the width/height of cropRect is an odd number, Rect.centerX/Y() will be
1208             // offset to the left/top by 0.5.
1209             newCropRect.left = Math.max(0, cropRect.centerX() - newWidth / 2);
1210             newCropRect.right = newCropRect.left + newWidth;
1211             if (newCropRect.right > resolution.getWidth()) {
1212                 newCropRect.right = resolution.getWidth();
1213                 newCropRect.left = newCropRect.right - newWidth;
1214             }
1215         }
1216         if (newHeight != cropRect.height()) {
1217             newCropRect.top = Math.max(0, cropRect.centerY() - newHeight / 2);
1218             newCropRect.bottom = newCropRect.top + newHeight;
1219             if (newCropRect.bottom > resolution.getHeight()) {
1220                 newCropRect.bottom = resolution.getHeight();
1221                 newCropRect.top = newCropRect.bottom - newHeight;
1222             }
1223         }
1224         Logger.d(TAG, String.format("Adjust cropRect from %s to %s", rectToString(cropRect),
1225                 rectToString(newCropRect)));
1226         return newCropRect;
1227     }
1228 
1229     private static void addBySupportedSize(@NonNull Set<Size> candidates, int width, int height,
1230             @NonNull Size resolution, @NonNull VideoEncoderInfo videoEncoderInfo) {
1231         if (width > resolution.getWidth() || height > resolution.getHeight()) {
1232             return;
1233         }
1234         try {
1235             Range<Integer> supportedHeights = videoEncoderInfo.getSupportedHeightsFor(width);
1236             candidates.add(new Size(width, supportedHeights.clamp(height)));
1237         } catch (IllegalArgumentException e) {
1238             Logger.w(TAG, "No supportedHeights for width: " + width, e);
1239         }
1240         try {
1241             Range<Integer> supportedWidths = videoEncoderInfo.getSupportedWidthsFor(height);
1242             candidates.add(new Size(supportedWidths.clamp(width), height));
1243         } catch (IllegalArgumentException e) {
1244             Logger.w(TAG, "No supportedWidths for height: " + height, e);
1245         }
1246     }
1247 
1248     @SuppressWarnings("WeakerAccess") /* synthetic accessor */
1249     boolean isStreamIdChanged(int currentId, int newId) {
1250         return !StreamInfo.NON_SURFACE_STREAM_ID.contains(currentId)
1251                 && !StreamInfo.NON_SURFACE_STREAM_ID.contains(newId)
1252                 && currentId != newId;
1253     }
1254 
1255     @SuppressWarnings("WeakerAccess") /* synthetic accessor */
1256     boolean shouldResetCompensatingTransformation(@NonNull StreamInfo currentStreamInfo,
1257             @NonNull StreamInfo streamInfo) {
1258         return mHasCompensatingTransformation
1259                 && currentStreamInfo.getInProgressTransformationInfo() != null
1260                 && streamInfo.getInProgressTransformationInfo() == null;
1261     }
1262 
1263     private boolean shouldMirror(@NonNull CameraInternal camera) {
1264         // Stream is always mirrored during buffer copy. If there has been a buffer copy, it
1265         // means the input stream is already mirrored. Otherwise, mirror it as needed.
1266         return camera.getHasTransform() && isMirroringRequired(camera);
1267     }
1268 
1269     private boolean shouldCompensateTransformation() {
1270         return mStreamInfo.getInProgressTransformationInfo() != null;
1271     }
1272 
1273     private static boolean shouldCrop(@NonNull Rect cropRect, @NonNull Size resolution) {
1274         return resolution.getWidth() != cropRect.width()
1275                 || resolution.getHeight() != cropRect.height();
1276     }
1277 
1278     private static <T extends VideoOutput> boolean shouldEnableSurfaceProcessingByConfig(
1279             @NonNull CameraInternal camera, @NonNull VideoCaptureConfig<T> config) {
1280         // If there has been a buffer copy, it means the surface processing is already enabled on
1281         // input stream. Otherwise, enable it as needed.
1282         return camera.getHasTransform() && config.isSurfaceProcessingForceEnabled();
1283     }
1284 
1285     private static boolean shouldEnableSurfaceProcessingByQuirk(@NonNull CameraInternal camera) {
1286         // If there has been a buffer copy, it means the surface processing is already enabled on
1287         // input stream. Otherwise, enable it as needed.
1288         return camera.getHasTransform() && (workaroundBySurfaceProcessing(DeviceQuirks.getAll())
1289                 || workaroundBySurfaceProcessing(camera.getCameraInfoInternal().getCameraQuirks()));
1290     }
1291 
1292     private static boolean shouldEnableSurfaceProcessingBasedOnDynamicRangeByQuirk(
1293             @NonNull CameraInternal camera, @NonNull DynamicRange dynamicRange) {
1294         HdrRepeatingRequestFailureQuirk quirk = DeviceQuirks.get(
1295                 HdrRepeatingRequestFailureQuirk.class);
1296         // If there has been a buffer copy, it means the surface processing is already enabled on
1297         // input stream. Otherwise, enable it as needed.
1298         return camera.getHasTransform() && quirk != null
1299                 && quirk.workaroundBySurfaceProcessing(dynamicRange);
1300     }
1301 
1302     private static int alignDown(int length, int alignment,
1303             @NonNull Range<Integer> supportedLength) {
1304         return align(true, length, alignment, supportedLength);
1305     }
1306 
1307     private static int alignUp(int length, int alignment,
1308             @NonNull Range<Integer> supportedRange) {
1309         return align(false, length, alignment, supportedRange);
1310     }
1311 
1312     private static int align(boolean alignDown, int length, int alignment,
1313             @NonNull Range<Integer> supportedRange) {
1314         int remainder = length % alignment;
1315         int newLength;
1316         if (remainder == 0) {
1317             newLength = length;
1318         } else if (alignDown) {
1319             newLength = length - remainder;
1320         } else {
1321             newLength = length + (alignment - remainder);
1322         }
1323         // Clamp new length by supportedRange, which is supposed to be valid length.
1324         return supportedRange.clamp(newLength);
1325     }
1326 
1327     private static @NonNull Timebase resolveTimebase(@NonNull CameraInternal camera,
1328             @Nullable SurfaceProcessorNode node) {
1329         // Choose the Timebase based on whether the buffer is copied.
1330         Timebase timebase;
1331         if (node != null || !camera.getHasTransform()) {
1332             timebase = camera.getCameraInfoInternal().getTimebase();
1333         } else {
1334             // When camera buffers from a REALTIME device are passed directly to a video encoder
1335             // from the camera, automatic compensation is done to account for differing timebases
1336             // of the audio and camera subsystems. See the document of
1337             // CameraMetadata#SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME. So the timebase is always
1338             // UPTIME when encoder surface is directly sent to camera.
1339             timebase = Timebase.UPTIME;
1340         }
1341         return timebase;
1342     }
1343 
1344     private static @NonNull Range<Integer> resolveFrameRate(@NonNull StreamSpec streamSpec) {
1345         // If the expected frame rate range is unspecified, we need to give an educated estimate
1346         // on what frame rate the camera will be operating at. For most devices this is a
1347         // constant frame rate of 30fps, but in the future this could probably be queried from
1348         // the camera.
1349         Range<Integer> frameRate = streamSpec.getExpectedFrameRateRange();
1350         if (Objects.equals(frameRate, FRAME_RATE_RANGE_UNSPECIFIED)) {
1351             frameRate = Defaults.DEFAULT_FPS_RANGE;
1352         }
1353         return frameRate;
1354     }
1355 
1356     private static @Nullable VideoEncoderInfo resolveVideoEncoderInfo(
1357             VideoEncoderInfo.@NonNull Finder videoEncoderInfoFinder,
1358             @Nullable VideoValidatedEncoderProfilesProxy encoderProfiles,
1359             @NonNull MediaSpec mediaSpec,
1360             @NonNull DynamicRange dynamicRange) {
1361         VideoMimeInfo videoMimeInfo = resolveVideoMimeInfo(mediaSpec, dynamicRange,
1362                 encoderProfiles);
1363 
1364         VideoEncoderInfo videoEncoderInfo = videoEncoderInfoFinder.find(
1365                 videoMimeInfo.getMimeType());
1366         if (videoEncoderInfo == null) {
1367             // If VideoCapture cannot find videoEncoderInfo, it means that VideoOutput should
1368             // also not be able to find the encoder. VideoCapture will not handle this situation
1369             // and leave it to VideoOutput to respond.
1370             Logger.w(TAG, "Can't find videoEncoderInfo");
1371             return null;
1372         }
1373 
1374         Size profileSize = encoderProfiles != null
1375                 ? encoderProfiles.getDefaultVideoProfile().getResolution() : null;
1376         return VideoEncoderInfoWrapper.from(videoEncoderInfo, profileSize);
1377     }
1378 
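    /**
     * Sets up a {@link ListenableFuture} that completes once a capture result tagged with the
     * current session config update is observed, and then transitions the {@link VideoOutput}
     * source state to streaming or non-streaming accordingly. A newer update cancels any pending
     * one.
     */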
1379     @MainThread
1380     private void setupSurfaceUpdateNotifier(SessionConfig.@NonNull Builder sessionConfigBuilder,
1381             boolean isStreamActive) {
1382         if (mSurfaceUpdateFuture != null) {
1383             // A newer update is issued before the previous update is completed. Cancel the
1384             // previous future.
1385             if (mSurfaceUpdateFuture.cancel(false)) {
1386                 Logger.d(TAG,
1387                         "A newer surface update is requested. Previous surface update cancelled.");
1388             }
1389         }
1390 
1391         ListenableFuture<Void> surfaceUpdateFuture = mSurfaceUpdateFuture =
1392                 CallbackToFutureAdapter.getFuture(completer -> {
1393                     // Use the completer as the tag to identify the update.
1394                     sessionConfigBuilder.addTag(SURFACE_UPDATE_KEY, completer.hashCode());
1395                     AtomicBoolean surfaceUpdateComplete = new AtomicBoolean(false);
1396                     CameraCaptureCallback cameraCaptureCallback =
1397                             new CameraCaptureCallback() {
1398                                 private boolean mIsFirstCaptureResult = true;
1399                                 @Override
1400                                 public void onCaptureCompleted(int captureConfigId,
1401                                         @NonNull CameraCaptureResult cameraCaptureResult) {
1402                                     super.onCaptureCompleted(captureConfigId, cameraCaptureResult);
1403                                     // Only print the first result to avoid flooding the log.
1404                                     if (mIsFirstCaptureResult) {
1405                                         mIsFirstCaptureResult = false;
1406                                         Logger.d(TAG, "cameraCaptureResult timestampNs = "
1407                                                 + cameraCaptureResult.getTimestamp()
1408                                                 + ", current system uptimeMs = "
1409                                                 + SystemClock.uptimeMillis()
1410                                                 + ", current system realtimeMs = "
1411                                                 + SystemClock.elapsedRealtime());
1412                                     }
1413                                     if (!surfaceUpdateComplete.get()) {
1414                                         Object tag = cameraCaptureResult.getTagBundle().getTag(
1415                                                 SURFACE_UPDATE_KEY);
1416                                         if (tag != null
1417                                                 && (int) tag == completer.hashCode()
1418                                                 && completer.set(null)
1419                                                 && !surfaceUpdateComplete.getAndSet(true)) {
1420                                             // Remove from builder so this callback doesn't get
1421                                             // added to future SessionConfigs
1422                                             CameraXExecutors.mainThreadExecutor().execute(() ->
1423                                                     sessionConfigBuilder
1424                                                             .removeCameraCaptureCallback(this));
1425                                         }
1426                                     }
1427                                 }
1428                             };
1429                     completer.addCancellationListener(() -> {
1430                         checkState(isMainThread(), "Surface update "
1431                                 + "cancellation should only occur on main thread.");
1432                         surfaceUpdateComplete.set(true);
1433                         sessionConfigBuilder.removeCameraCaptureCallback(cameraCaptureCallback);
1434                     }, CameraXExecutors.directExecutor());
1435                     sessionConfigBuilder.addRepeatingCameraCaptureCallback(cameraCaptureCallback);
1436 
1437                     return String.format("%s[0x%x]", SURFACE_UPDATE_KEY, completer.hashCode());
1438                 });
1439 
1440         Futures.addCallback(surfaceUpdateFuture, new FutureCallback<Void>() {
1441             @Override
1442             public void onSuccess(@Nullable Void result) {
1443                 // If there is a new surface update request, we will wait to update the video
1444                 // output until that update is complete.
1445                 // Also, if the source state is inactive, then we are detached and should not tell
1446                 // the video output we're active.
1447                 if (surfaceUpdateFuture == mSurfaceUpdateFuture
1448                         && mSourceState != VideoOutput.SourceState.INACTIVE) {
1449                     setSourceState(isStreamActive ? VideoOutput.SourceState.ACTIVE_STREAMING
1450                             : VideoOutput.SourceState.ACTIVE_NON_STREAMING);
1451                 }
1452             }
1453 
1454             @Override
1455             public void onFailure(@NonNull Throwable t) {
1456                 if (!(t instanceof CancellationException)) {
1457                     Logger.e(TAG, "Surface update completed with unexpected exception", t);
1458                 }
1459             }
1460         }, CameraXExecutors.mainThreadExecutor());
1461     }
1462 
1463     /**
1464      * Set {@link ImageOutputConfig#OPTION_CUSTOM_ORDERED_RESOLUTIONS} according to the resolution
1465      * found by the {@link QualitySelector} in VideoOutput.
1466      *
1467      * @throws IllegalArgumentException if not able to find a resolution by the QualitySelector
1468      *                                  in VideoOutput.
1469      */
1470     @SuppressWarnings("unchecked") // Cast to VideoCaptureConfig<T>
1471     private void updateCustomOrderedResolutionsByQuality(@NonNull CameraInfoInternal cameraInfo,
1472             UseCaseConfig.@NonNull Builder<?, ?, ?> builder) throws IllegalArgumentException {
1473         MediaSpec mediaSpec = getMediaSpec();
1474 
1475         Preconditions.checkArgument(mediaSpec != null,
1476                 "Unable to update target resolution by null MediaSpec.");
1477 
1478         DynamicRange requestedDynamicRange = getDynamicRange();
1479         Range<Integer> targetHighSpeedFrameRate =
1480                 builder.getUseCaseConfig().getTargetHighSpeedFrameRate(
1481                         FRAME_RATE_RANGE_UNSPECIFIED);
1482         int sessionType = !FRAME_RATE_RANGE_UNSPECIFIED.equals(targetHighSpeedFrameRate)
1483                 ? SESSION_TYPE_HIGH_SPEED : SESSION_TYPE_REGULAR;
1484         VideoCapabilities videoCapabilities = getVideoCapabilities(cameraInfo, sessionType);
1485 
1486         // Get supported qualities.
1487         List<Quality> supportedQualities = videoCapabilities.getSupportedQualities(
1488                 requestedDynamicRange);
1489         Logger.d(TAG, "supportedQualities = " + supportedQualities);
1490         if (supportedQualities.isEmpty()) {
1491             // When the device does not have any supported quality, even the most flexible
1492             // QualitySelector, such as QualitySelector.from(Quality.HIGHEST), still cannot
1493             // find any resolution. This should be a rare case but will cause VideoCapture
1494             // to always fail to bind. The workaround is to not set any resolution and leave it
1495             // to the auto resolution mechanism.
1496             Logger.w(TAG, "Can't find any supported quality on the device.");
1497             return;
1498         }
1499 
1500         // Get selected qualities.
1501         VideoSpec videoSpec = mediaSpec.getVideoSpec();
1502         QualitySelector qualitySelector = videoSpec.getQualitySelector();
1503         List<Quality> selectedQualities = qualitySelector.getPrioritizedQualities(
1504                 supportedQualities);
1505         Logger.d(TAG, "Found selectedQualities " + selectedQualities + " by " + qualitySelector);
1506         if (selectedQualities.isEmpty()) {
1507             throw new IllegalArgumentException(
1508                     "Unable to find supported quality by QualitySelector");
1509         }
1510 
1511         // Get the corresponding resolutions for the target aspect ratio.
1512         int aspectRatio = videoSpec.getAspectRatio();
1513         Map<Quality, Size> supportedQualityToSizeMap = getQualityToResolutionMap(videoCapabilities,
1514                 requestedDynamicRange);
1515         QualityRatioToResolutionsTable qualityRatioTable = new QualityRatioToResolutionsTable(
1516                 cameraInfo.getSupportedResolutions(getImageFormat()), supportedQualityToSizeMap);
1517         // Use LinkedHashMap to maintain the order.
1518         LinkedHashMap<Quality, List<Size>> orderedQualityToSizesMap = new LinkedHashMap<>();
1519         for (Quality selectedQuality : selectedQualities) {
1520             orderedQualityToSizesMap.put(selectedQuality,
1521                     qualityRatioTable.getResolutions(selectedQuality, aspectRatio));
1522         }
1523         LinkedHashMap<Quality, List<Size>> filteredOrderedQualityToSizesMap =
1524                 filterOutEncoderUnsupportedResolutions(
1525                         (VideoCaptureConfig<T>) builder.getUseCaseConfig(), mediaSpec,
1526                         requestedDynamicRange, videoCapabilities, orderedQualityToSizesMap,
1527                         supportedQualityToSizeMap);
1528         List<Size> filteredCustomOrderedResolutions = new ArrayList<>();
1529         for (List<Size> resolutions : filteredOrderedQualityToSizesMap.values()) {
1530             filteredCustomOrderedResolutions.addAll(resolutions);
1531         }
1532         Logger.d(TAG, "Set custom ordered resolutions = " + filteredCustomOrderedResolutions);
1533         builder.getMutableConfig().insertOption(OPTION_CUSTOM_ORDERED_RESOLUTIONS,
1534                 filteredCustomOrderedResolutions);
1535         mQualityToCustomSizesMap = filteredOrderedQualityToSizesMap;
1536     }
1537 
1538     private static @NonNull LinkedHashMap<Quality, List<Size>>
1539             filterOutEncoderUnsupportedResolutions(
1540             @NonNull VideoCaptureConfig<?> config,
1541             @NonNull MediaSpec mediaSpec,
1542             @NonNull DynamicRange dynamicRange,
1543             @NonNull VideoCapabilities videoCapabilities,
1544             @NonNull LinkedHashMap<Quality, List<Size>> qualityToSizesOrderedMap,
1545             @NonNull Map<Quality, Size> supportedQualityToSizeMap
1546     ) {
1547         if (qualityToSizesOrderedMap.isEmpty()) {
1548             return new LinkedHashMap<>();
1549         }
1550 
1551         LinkedHashMap<Quality, List<Size>> filteredQualityToSizesOrderedMap = new LinkedHashMap<>();
1552         for (Map.Entry<Quality, List<Size>> entry : qualityToSizesOrderedMap.entrySet()) {
1553             // Copy the size list first and filter out the unsupported resolutions.
1554             List<Size> filteredSizeList = new ArrayList<>(entry.getValue());
1555             Iterator<Size> sizeIterator = filteredSizeList.iterator();
1556             while (sizeIterator.hasNext()) {
1557                 Size resolution = sizeIterator.next();
1558                 // To improve performance, there is no need to check for supported qualities'
1559                 // resolutions because the encoder should support them.
1560                 if (supportedQualityToSizeMap.containsValue(resolution)) {
1561                     continue;
1562                 }
1563                 // We must find EncoderProfiles for each resolution because the EncoderProfiles
1564                 // found by resolution may contain a different video MIME type, which leads to a
1565                 // different codec.
1566                 VideoValidatedEncoderProfilesProxy encoderProfiles =
1567                         videoCapabilities.findNearestHigherSupportedEncoderProfilesFor(resolution,
1568                                 dynamicRange);
1569                 if (encoderProfiles == null) {
1570                     continue;
1571                 }
1572                 // If the user set a non-fully specified target DynamicRange, there could be
1573                 // multiple videoProfiles that match the DynamicRange. Find the one with the
1574                 // largest supported size as a workaround.
1575                 // If the suggested StreamSpec (i.e. DynamicRange + resolution) unfortunately exceeds
1576                 // the codec's supported size, then rely on surface processing (OpenGL) to resize
1577                 // the camera stream.
1578                 VideoEncoderInfo videoEncoderInfo = findLargestSupportedSizeVideoEncoderInfo(
1579                         config.getVideoEncoderInfoFinder(), encoderProfiles, dynamicRange,
1580                         mediaSpec);
1581                 if (videoEncoderInfo != null && !videoEncoderInfo.isSizeSupportedAllowSwapping(
1582                         resolution.getWidth(), resolution.getHeight())) {
1583                     sizeIterator.remove();
1584                 }
1585             }
1586 
1587             // Put the filtered size list only when it is not empty.
1588             if (!filteredSizeList.isEmpty()) {
1589                 filteredQualityToSizesOrderedMap.put(entry.getKey(), filteredSizeList);
1590             }
1591         }
1592         return filteredQualityToSizesOrderedMap;
1593     }
1594 
1595     private static @Nullable VideoEncoderInfo findLargestSupportedSizeVideoEncoderInfo(
1596             VideoEncoderInfo.@NonNull Finder videoEncoderInfoFinder,
1597             @NonNull VideoValidatedEncoderProfilesProxy encoderProfiles,
1598             @NonNull DynamicRange dynamicRange,
1599             @NonNull MediaSpec mediaSpec) {
1600         if (dynamicRange.isFullySpecified()) {
1601             return resolveVideoEncoderInfo(videoEncoderInfoFinder, encoderProfiles, mediaSpec,
1602                     dynamicRange);
1603         }
1604         // There could be multiple VideoProfiles that match the non-fully specified DynamicRange.
1605         // The one with the largest supported size will be returned.
1606         VideoEncoderInfo sizeLargestVideoEncoderInfo = null;
1607         int largestArea = Integer.MIN_VALUE;
1608         for (EncoderProfilesProxy.VideoProfileProxy videoProfile :
1609                 encoderProfiles.getVideoProfiles()) {
1610             if (isHdrSettingsMatched(videoProfile, dynamicRange)) {
1611                 DynamicRange profileDynamicRange = new DynamicRange(
1612                         videoProfileHdrFormatsToDynamicRangeEncoding(videoProfile.getHdrFormat()),
1613                         videoProfileBitDepthToDynamicRangeBitDepth(videoProfile.getBitDepth()));
1614                 VideoEncoderInfo videoEncoderInfo =
1615                         resolveVideoEncoderInfo(videoEncoderInfoFinder, encoderProfiles, mediaSpec,
1616                                 profileDynamicRange);
1617                 if (videoEncoderInfo == null) {
1618                     continue;
1619                 }
1620                 // Compare by area size.
1621                 int area = getArea(videoEncoderInfo.getSupportedWidths().getUpper(),
1622                         videoEncoderInfo.getSupportedHeights().getUpper());
1623                 if (area > largestArea) {
1624                     largestArea = area;
1625                     sizeLargestVideoEncoderInfo = videoEncoderInfo;
1626                 }
1627             }
1628         }
1629         return sizeLargestVideoEncoderInfo;
1630     }
1631 
1632     /**
1633      * Finds the Quality with the size closest to the target size based on area.
1634      *
1635      * @param sizeMap The map of Quality to a list of Sizes.
1636      * @param targetSize The target size to compare against.
1637      * @return The Quality with the closest size, or {@code null} if no match is found.
1638      */
1639     private static @Nullable Quality findNearestSizeFor(
1640             @NonNull Map<Quality, List<Size>> sizeMap, @NonNull Size targetSize) {
1641         int targetArea = getArea(targetSize);
1642         Quality nearestQuality = null;
1643         int minAreaDiff = Integer.MAX_VALUE;
1644 
1645         for (Map.Entry<Quality, List<Size>> entry : sizeMap.entrySet()) {
1646             for (Size size : entry.getValue()) {
1647                 int areaDiff = Math.abs(getArea(size) - targetArea);
1648                 if (areaDiff < minAreaDiff) {
1649                     minAreaDiff = areaDiff;
1650                     nearestQuality = entry.getKey();
1651                 }
1652             }
1653         }
1654 
1655         return nearestQuality;
1656     }
1657 
1658     /**
1659      * Gets the snapshot value of the given {@link Observable}.
1660      *
1661      * <p>Note: Setting {@code valueIfMissing} to a non-{@code null} value doesn't mean the
1662      * method will never return a {@code null} value. The observable could contain an exact
1663      * {@code null} value.
1664      *
1665      * @param observable     the observable
1666      * @param valueIfMissing the value to return if the observable doesn't contain a value.
1667      * @param <T>            the value type
1668      * @return the snapshot value of the given {@link Observable}.
1669      */
1670     private static <T> @Nullable T fetchObservableValue(@NonNull Observable<T> observable,
1671             @Nullable T valueIfMissing) {
1672         ListenableFuture<T> future = observable.fetchData();
1673         if (!future.isDone()) {
1674             return valueIfMissing;
1675         }
1676         try {
1677             return future.get();
1678         } catch (ExecutionException | InterruptedException e) {
1679             // Should not happen
1680             throw new IllegalStateException(e);
1681         }
1682     }
1683 
1684     @SuppressWarnings("WeakerAccess") // synthetic accessor
1685     @MainThread
1686     void setSourceState(VideoOutput.@NonNull SourceState newState) {
1687         VideoOutput.SourceState oldState = mSourceState;
1688         if (newState != oldState) {
1689             mSourceState = newState;
1690             getOutput().onSourceStateChanged(newState);
1691         }
1692     }
1693 
1694     @VisibleForTesting
1695     @NonNull SurfaceRequest getSurfaceRequest() {
1696         return requireNonNull(mSurfaceRequest);
1697     }
1698 
1699     /**
1700      * {@inheritDoc}
1701      */
1702     @RestrictTo(Scope.LIBRARY_GROUP)
1703     @Override
1704     public @NonNull Set<Integer> getSupportedEffectTargets() {
1705         Set<Integer> targets = new HashSet<>();
1706         targets.add(VIDEO_CAPTURE);
1707         return targets;
1708     }
1709 
1710     /**
1711      * Builder for a {@link VideoCapture}.
1712      *
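     * <p>A minimal construction sketch, assuming a {@link Recorder} is used as the
     * {@link VideoOutput}:
     * <pre>{@code
     * Recorder recorder = new Recorder.Builder().build();
     * VideoCapture<Recorder> videoCapture = new VideoCapture.Builder<>(recorder).build();
     * }</pre>
     *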
1713      * @param <T> the type of VideoOutput
1714      */
1715     @SuppressWarnings("ObjectToString")
1716     public static final class Builder<T extends VideoOutput> implements
1717             UseCaseConfig.Builder<VideoCapture<T>, VideoCaptureConfig<T>, Builder<T>>,
1718             ImageOutputConfig.Builder<Builder<T>>, ImageInputConfig.Builder<Builder<T>>,
1719             ThreadConfig.Builder<Builder<T>> {
1720         private final MutableOptionsBundle mMutableConfig;
1721 
1722         /** Creates a new Builder object. */
1723         public Builder(@NonNull T videoOutput) {
1724             this(createInitialBundle(videoOutput));
1725         }
1726 
1727         @SuppressWarnings("unchecked")
1728         private Builder(@NonNull MutableOptionsBundle mutableConfig) {
1729             mMutableConfig = mutableConfig;
1730 
1731             if (!mMutableConfig.containsOption(OPTION_VIDEO_OUTPUT)) {
1732                 throw new IllegalArgumentException("VideoOutput is required");
1733             }
1734 
1735             Class<?> oldConfigClass =
1736                     mutableConfig.retrieveOption(OPTION_TARGET_CLASS, null);
1737             if (oldConfigClass != null && !oldConfigClass.equals(VideoCapture.class)) {
1738                 throw new IllegalArgumentException(
1739                         "Invalid target class configuration for "
1740                                 + Builder.this
1741                                 + ": "
1742                                 + oldConfigClass);
1743             }
1744 
1745             setCaptureType(UseCaseConfigFactory.CaptureType.VIDEO_CAPTURE);
1746             setTargetClass((Class<VideoCapture<T>>) (Type) VideoCapture.class);
1747         }
1748 
1749         @RestrictTo(Scope.LIBRARY_GROUP)
1750         static @NonNull Builder<? extends VideoOutput> fromConfig(@NonNull Config configuration) {
1751             return new Builder<>(MutableOptionsBundle.from(configuration));
1752         }
1753 
1754         /**
1755          * Generates a Builder from another Config object
1756          *
1757          * @param configuration An immutable configuration to pre-populate this builder.
1758          * @return The new Builder.
1759          */
1760         @RestrictTo(Scope.LIBRARY_GROUP)
1761         public static <T extends VideoOutput> @NonNull Builder<T> fromConfig(
1762                 @NonNull VideoCaptureConfig<T> configuration) {
1763             return new Builder<>(MutableOptionsBundle.from(configuration));
1764         }
1765 
1766         private static <T extends VideoOutput> @NonNull MutableOptionsBundle createInitialBundle(
1767                 @NonNull T videoOutput) {
1768             MutableOptionsBundle bundle = MutableOptionsBundle.create();
1769             bundle.insertOption(OPTION_VIDEO_OUTPUT, videoOutput);
1770             return bundle;
1771         }
1772 
1773         /**
1774          * {@inheritDoc}
1775          */
1776         @RestrictTo(Scope.LIBRARY_GROUP)
1777         @Override
1778         public @NonNull MutableConfig getMutableConfig() {
1779             return mMutableConfig;
1780         }
1781 
1782         /**
1783          * {@inheritDoc}
1784          */
1785         @RestrictTo(Scope.LIBRARY_GROUP)
1786         @Override
1787         public @NonNull VideoCaptureConfig<T> getUseCaseConfig() {
1788             return new VideoCaptureConfig<>(OptionsBundle.from(mMutableConfig));
1789         }
1790 
1791         /** Sets the associated {@link VideoOutput}. */
1792         @RestrictTo(Scope.LIBRARY_GROUP)
1793         public @NonNull Builder<T> setVideoOutput(@NonNull VideoOutput videoOutput) {
1794             getMutableConfig().insertOption(OPTION_VIDEO_OUTPUT, videoOutput);
1795             return this;
1796         }
1797 
1798         @NonNull Builder<T> setVideoEncoderInfoFinder(
1799                 VideoEncoderInfo.@NonNull Finder videoEncoderInfoFinder) {
1800             getMutableConfig().insertOption(OPTION_VIDEO_ENCODER_INFO_FINDER,
1801                     videoEncoderInfoFinder);
1802             return this;
1803         }
1804 
1805         /**
1806          * Builds a {@link VideoCapture} from the current state.
1807          *
1808          * @return A {@link VideoCapture} populated with the current state.
1809          */
1810         @Override
1811         public @NonNull VideoCapture<T> build() {
1812             return new VideoCapture<>(getUseCaseConfig());
1813         }
1814 
1815         // Implementations of TargetConfig.Builder default methods
1816 
1817         @RestrictTo(Scope.LIBRARY_GROUP)
1818         @Override
1819         public @NonNull Builder<T> setTargetClass(@NonNull Class<VideoCapture<T>> targetClass) {
1820             getMutableConfig().insertOption(OPTION_TARGET_CLASS, targetClass);
1821 
1822             // If no name is set yet, then generate a unique name
1823             if (null == getMutableConfig().retrieveOption(OPTION_TARGET_NAME, null)) {
1824                 String targetName = targetClass.getCanonicalName() + "-" + UUID.randomUUID();
1825                 setTargetName(targetName);
1826             }
1827 
1828             return this;
1829         }
1830 
1831         /**
1832          * Sets the name of the target object being configured, used only for debug logging.
1833          *
1834          * <p>The name should be a value that can uniquely identify an instance of the object being
1835          * configured.
1836          *
1837          * <p>If not set, the target name will default to a unique name automatically generated
1838          * from the class canonical name and a random UUID.
1839          *
1840          * @param targetName A unique string identifier for the instance of the class being
1841          *                   configured.
1842          * @return the current Builder.
1843          */
1844         @RestrictTo(Scope.LIBRARY_GROUP)
1845         @Override
1846         public @NonNull Builder<T> setTargetName(@NonNull String targetName) {
1847             getMutableConfig().insertOption(OPTION_TARGET_NAME, targetName);
1848             return this;
1849         }
1850 
1851         // Implementations of ImageOutputConfig.Builder default methods
1852 
1853         /**
1854          * setTargetAspectRatio is not supported on VideoCapture
1855          *
1856          * <p>To set aspect ratio, see {@link Recorder.Builder#setAspectRatio(int)}.
1857          */
1858         @RestrictTo(Scope.LIBRARY_GROUP)
1859         @Override
1860         public @NonNull Builder<T> setTargetAspectRatio(@AspectRatio.Ratio int aspectRatio) {
1861             throw new UnsupportedOperationException("setTargetAspectRatio is not supported.");
1862         }
1863 
1864         /**
1865          * Sets the rotation of the intended target for images from this configuration.
1866          *
1867          * <p>Valid values include: {@link Surface#ROTATION_0}, {@link Surface#ROTATION_90},
1868          * {@link Surface#ROTATION_180}, {@link Surface#ROTATION_270}.
1869          * Rotation values are relative to the "natural" rotation, {@link Surface#ROTATION_0}.
1870          *
1871          * <p>In general, it is best to additionally set the target rotation dynamically on the
1872          * use case. See {@link VideoCapture#setTargetRotation(int)} for additional
1873          * documentation.
1874          *
1875          * <p>If not set, the target rotation will default to the value of
1876          * {@link Display#getRotation()} of the default display at the time the use case is bound.
1877          *
1878          * <p>For a {@link Recorder} output, the final rotation degrees of the video, including
1879          * the degrees set by this method and the orientation of the camera sensor, will be
1880          * applied in one of several ways: 1) the rotation degrees are written into the video
1881          * metadata, 2) the video content is directly rotated, or 3) both, i.e. rotation metadata
1882          * combined with rotated video content that together produce the target rotation. CameraX
1883          * will choose a strategy according to the use case.
1884          *
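         * <p>A brief sketch of setting an initial rotation at build time; {@code recorder} below
         * stands for an existing {@link Recorder} instance:
         * <pre>{@code
         * VideoCapture<Recorder> videoCapture = new VideoCapture.Builder<>(recorder)
         *         .setTargetRotation(Surface.ROTATION_0)
         *         .build();
         * }</pre>
         *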
1885          * @param rotation The rotation of the intended target.
1886          * @return The current Builder.
1887          * @see VideoCapture#setTargetRotation(int)
1888          * @see android.view.OrientationEventListener
1889          */
1890         @Override
1891         public @NonNull Builder<T> setTargetRotation(@RotationValue int rotation) {
1892             getMutableConfig().insertOption(OPTION_TARGET_ROTATION, rotation);
1893             return this;
1894         }
1895 
1896         /**
1897          * Sets the mirror mode.
1898          *
1899          * <p>Valid values include: {@link MirrorMode#MIRROR_MODE_OFF},
1900          * {@link MirrorMode#MIRROR_MODE_ON} and {@link MirrorMode#MIRROR_MODE_ON_FRONT_ONLY}.
1901          * If not set, it defaults to {@link MirrorMode#MIRROR_MODE_OFF}.
1902          *
1903          * <p>This API only changes the mirroring behavior on VideoCapture, but does not affect
1904          * other UseCases. If the application wants to be consistent with the default
1905          * {@link Preview} behavior where the rear camera is not mirrored but the front camera is
1906          * mirrored, then {@link MirrorMode#MIRROR_MODE_ON_FRONT_ONLY} is recommended.
1907          *
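         * <p>For example, to mirror only recordings from the front camera, matching the default
         * {@link Preview} behavior ({@code recorder} is assumed to be an existing {@link Recorder}):
         * <pre>{@code
         * VideoCapture<Recorder> videoCapture = new VideoCapture.Builder<>(recorder)
         *         .setMirrorMode(MirrorMode.MIRROR_MODE_ON_FRONT_ONLY)
         *         .build();
         * }</pre>
         *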
1908          * @param mirrorMode The mirror mode of the intended target.
1909          * @return The current Builder.
1910          */
1911         @Override
1912         public @NonNull Builder<T> setMirrorMode(@MirrorMode.Mirror int mirrorMode) {
1913             getMutableConfig().insertOption(OPTION_MIRROR_MODE, mirrorMode);
1914             return this;
1915         }
1916 
1917         /**
1918          * setTargetResolution is not supported on VideoCapture
1919          *
1920          * <p>To set resolution, see {@link Recorder.Builder#setQualitySelector(QualitySelector)}.
1921          */
1922         @RestrictTo(Scope.LIBRARY_GROUP)
1923         @Override
1924         public @NonNull Builder<T> setTargetResolution(@NonNull Size resolution) {
1925             throw new UnsupportedOperationException("setTargetResolution is not supported.");
1926         }
1927 
1928         /**
1929          * Sets the default resolution of the intended target from this configuration.
1930          *
1931          * @param resolution The default resolution to choose from supported output sizes list.
1932          * @return The current Builder.
1933          */
1934         @RestrictTo(Scope.LIBRARY_GROUP)
1935         @Override
1936         public @NonNull Builder<T> setDefaultResolution(@NonNull Size resolution) {
1937             getMutableConfig().insertOption(OPTION_DEFAULT_RESOLUTION, resolution);
1938             return this;
1939         }
1940 
1941         @RestrictTo(Scope.LIBRARY_GROUP)
1942         @Override
1943         public @NonNull Builder<T> setMaxResolution(@NonNull Size resolution) {
1944             getMutableConfig().insertOption(OPTION_MAX_RESOLUTION, resolution);
1945             return this;
1946         }
1947 
1948         @RestrictTo(Scope.LIBRARY_GROUP)
1949         @Override
1950         public @NonNull Builder<T> setSupportedResolutions(
1951                 @NonNull List<Pair<Integer, Size[]>> resolutions) {
1952             getMutableConfig().insertOption(OPTION_SUPPORTED_RESOLUTIONS, resolutions);
1953             return this;
1954         }
1955 
1956         @RestrictTo(Scope.LIBRARY_GROUP)
1957         @Override
1958         public @NonNull Builder<T> setCustomOrderedResolutions(@NonNull List<Size> resolutions) {
1959             getMutableConfig().insertOption(OPTION_CUSTOM_ORDERED_RESOLUTIONS, resolutions);
1960             return this;
1961         }
1962 
1963         @RestrictTo(Scope.LIBRARY_GROUP)
1964         @Override
1965         public @NonNull Builder<T> setResolutionSelector(
1966                 @NonNull ResolutionSelector resolutionSelector) {
1967             getMutableConfig().insertOption(OPTION_RESOLUTION_SELECTOR, resolutionSelector);
1968             return this;
1969         }
1970 
1971         // Implementations of ImageInputConfig.Builder default methods
1972 
1973         /**
1974          * Sets the {@link DynamicRange}.
1975          *
1976          * <p>The dynamic range specifies how the range of colors, highlights and shadows that
1977          * are captured by the video producer are displayed on a display. Some dynamic ranges will
1978          * allow the video to make full use of the extended range of brightness of a display when
1979          * the video is played back.
1980          *
1981          * <p>The supported dynamic ranges for video capture depend on the capabilities of the
1982          * camera and the {@link VideoOutput}. The supported dynamic ranges can normally be
1983          * queried through the specific video output. For example, the available dynamic
1984          * ranges for the {@link Recorder} video output can be queried through
1985          * the {@link androidx.camera.video.VideoCapabilities} returned by
1986          * {@link Recorder#getVideoCapabilities(CameraInfo)} via
1987          * {@link androidx.camera.video.VideoCapabilities#getSupportedDynamicRanges()}.
1988          *
1989          * <p>It is possible to choose a high dynamic range (HDR) with unspecified encoding by
1990          * providing {@link DynamicRange#HDR_UNSPECIFIED_10_BIT}.
1991          *
1992          * <p>If the dynamic range is not provided, the returned video capture use case will use
1993          * a default of {@link DynamicRange#SDR}.
1994          *
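         * <p>A short sketch of querying a {@link Recorder}'s supported dynamic ranges and
         * requesting 10-bit HLG when available ({@code cameraInfo} and {@code builder} are
         * assumed to be the bound camera's {@link CameraInfo} and this Builder instance):
         * <pre>{@code
         * Set<DynamicRange> supported = Recorder.getVideoCapabilities(cameraInfo)
         *         .getSupportedDynamicRanges();
         * if (supported.contains(DynamicRange.HLG_10_BIT)) {
         *     builder.setDynamicRange(DynamicRange.HLG_10_BIT);
         * }
         * }</pre>
         *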
1995          * @return The current Builder.
1996          * @see DynamicRange
1997          */
1998         @Override
1999         public @NonNull Builder<T> setDynamicRange(@NonNull DynamicRange dynamicRange) {
2000             getMutableConfig().insertOption(OPTION_INPUT_DYNAMIC_RANGE, dynamicRange);
2001             return this;
2002         }
2003 
2004         // Implementations of ThreadConfig.Builder default methods
2005 
2006         /**
2007          * Sets the default executor that will be used for background tasks.
2008          *
2009          * <p>If not set, the background executor will default to an automatically generated
2010          * {@link Executor}.
2011          *
2012          * @param executor The executor which will be used for background tasks.
2013          * @return the current Builder.
2014          */
2015         @RestrictTo(Scope.LIBRARY_GROUP)
2016         @Override
2017         public @NonNull Builder<T> setBackgroundExecutor(@NonNull Executor executor) {
2018             getMutableConfig().insertOption(OPTION_BACKGROUND_EXECUTOR, executor);
2019             return this;
2020         }
2021 
2022         // Implementations of UseCaseConfig.Builder default methods
2023 
2024         @RestrictTo(Scope.LIBRARY_GROUP)
2025         @Override
2026         public @NonNull Builder<T> setDefaultSessionConfig(@NonNull SessionConfig sessionConfig) {
2027             getMutableConfig().insertOption(OPTION_DEFAULT_SESSION_CONFIG, sessionConfig);
2028             return this;
2029         }
2030 
2031         @RestrictTo(Scope.LIBRARY_GROUP)
2032         @Override
2033         public @NonNull Builder<T> setDefaultCaptureConfig(@NonNull CaptureConfig captureConfig) {
2034             getMutableConfig().insertOption(OPTION_DEFAULT_CAPTURE_CONFIG, captureConfig);
2035             return this;
2036         }
2037 
2038         @RestrictTo(Scope.LIBRARY_GROUP)
2039         @Override
2040         public @NonNull Builder<T> setSessionOptionUnpacker(
2041                 SessionConfig.@NonNull OptionUnpacker optionUnpacker) {
2042             getMutableConfig().insertOption(OPTION_SESSION_CONFIG_UNPACKER, optionUnpacker);
2043             return this;
2044         }
2045 
2046         @RestrictTo(Scope.LIBRARY_GROUP)
2047         @Override
2048         public @NonNull Builder<T> setCaptureOptionUnpacker(
2049                 CaptureConfig.@NonNull OptionUnpacker optionUnpacker) {
2050             getMutableConfig().insertOption(OPTION_CAPTURE_CONFIG_UNPACKER, optionUnpacker);
2051             return this;
2052         }
2053 
2054         @RestrictTo(Scope.LIBRARY_GROUP)
2055         @Override
2056         public @NonNull Builder<T> setSurfaceOccupancyPriority(int priority) {
2057             getMutableConfig().insertOption(OPTION_SURFACE_OCCUPANCY_PRIORITY, priority);
2058             return this;
2059         }
2060 
2061         @RestrictTo(Scope.LIBRARY_GROUP)
2062         @Override
2063         public @NonNull Builder<T> setZslDisabled(boolean disabled) {
2064             getMutableConfig().insertOption(OPTION_ZSL_DISABLED, disabled);
2065             return this;
2066         }
2067 
2068         @RestrictTo(Scope.LIBRARY_GROUP)
2069         @Override
2070         public @NonNull Builder<T> setHighResolutionDisabled(boolean disabled) {
2071             getMutableConfig().insertOption(OPTION_HIGH_RESOLUTION_DISABLED, disabled);
2072             return this;
2073         }
2074 
        /**
         * Sets the target frame rate range in frames per second for the associated VideoCapture
         * use case.
         *
         * <p>This target is used as part of the heuristics for the algorithm that determines
         * the final frame rate range and resolution of all concurrently bound use cases.
         *
         * <p>It is not guaranteed that this target frame rate will be the final range; the
         * other bound use cases as well as the frame rate restrictions of the device may affect
         * the outcome of the algorithm that chooses the actual frame rate.
         *
         * <p>For supported frame rates, see {@link CameraInfo#getSupportedFrameRateRanges()}.
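         *
         * <p>Below is a minimal usage sketch, not a definitive recipe; the {@code recorder}
         * instance is assumed to be created elsewhere, and the chosen range should be one that
         * the device supports:
         * <pre>{@code
         * VideoCapture<Recorder> videoCapture = new VideoCapture.Builder<>(recorder)
         *         .setTargetFrameRate(new Range<>(30, 30))
         *         .build();
         * }</pre>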
         *
         * @param targetFrameRate the target frame rate range.
         */
        public @NonNull Builder<T> setTargetFrameRate(@NonNull Range<Integer> targetFrameRate) {
            getMutableConfig().insertOption(OPTION_TARGET_FRAME_RATE, targetFrameRate);
            return this;
        }

        /**
         * Enables video stabilization.
         *
         * <p>This enables stabilization for the video capture use case. However, it is not
         * guaranteed that stabilization will also be enabled for the preview use case. To enable
         * preview stabilization, use
         * {@link Preview.Builder#setPreviewStabilizationEnabled(boolean)} instead.
         *
         * <p>With preview stabilization, the streams of the {@link Preview} and
         * {@link VideoCapture} use cases are stabilized with the same quality of stabilization.
         * This mode aims to give clients a 'what you see is what you get' effect. In this mode,
         * the FoV reduction is at most 20% both horizontally and vertically (10% from each of
         * left, right, top and bottom) for the given zoom ratio / crop region. The resulting FoV
         * is also the same across all use cases that have the same aspect ratio. This is the
         * tradeoff between video stabilization and preview stabilization.
         *
         * <p>It is recommended to query the device capability via
         * {@link VideoCapabilities#isStabilizationSupported()} before enabling this feature,
         * otherwise a HAL error might be thrown.
         *
         * <p>Depending on whether preview stabilization and video stabilization are enabled or
         * disabled, the final result will be:
         *
         * <p>
         * <table>
         * <tr> <th id="rb">Preview</th> <th id="rb">VideoCapture</th>   <th id="rb">Result</th>
         * </tr>
         * <tr> <td>ON</td> <td>ON</td> <td>Both Preview and VideoCapture will be stabilized,
         * but VideoCapture quality might be worse than with only VideoCapture stabilized</td>
         * </tr>
         * <tr> <td>ON</td> <td>OFF</td> <td>Neither Preview nor VideoCapture will be
         * stabilized</td>  </tr>
         * <tr> <td>ON</td> <td>NOT SPECIFIED</td> <td>Both Preview and VideoCapture will be
         * stabilized</td>  </tr>
         * <tr> <td>OFF</td> <td>ON</td> <td>Neither Preview nor VideoCapture will be
         * stabilized</td>  </tr>
         * <tr> <td>OFF</td> <td>OFF</td> <td>Neither Preview nor VideoCapture will be
         * stabilized</td>  </tr>
         * <tr> <td>OFF</td> <td>NOT SPECIFIED</td> <td>Neither Preview nor VideoCapture will be
         * stabilized</td>  </tr>
         * <tr> <td>NOT SPECIFIED</td> <td>ON</td> <td>Only VideoCapture will be stabilized,
         * and Preview might be stabilized depending on the device</td>
         * </tr>
         * <tr> <td>NOT SPECIFIED</td> <td>OFF</td> <td>Neither Preview nor VideoCapture will be
         * stabilized</td>  </tr>
         * </table><br>
         *
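         * <p>A minimal usage sketch follows; it is illustrative only, and the {@code recorder}
         * and {@code cameraInfo} instances are assumed to be obtained elsewhere:
         * <pre>{@code
         * VideoCapabilities capabilities = Recorder.getVideoCapabilities(cameraInfo);
         * VideoCapture.Builder<Recorder> builder = new VideoCapture.Builder<>(recorder);
         * if (capabilities.isStabilizationSupported()) {
         *     builder.setVideoStabilizationEnabled(true);
         * }
         * VideoCapture<Recorder> videoCapture = builder.build();
         * }</pre>
         *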
         * @param enabled True to enable video stabilization, false otherwise.
         * @return the current Builder.
         *
         * @see VideoCapabilities#isStabilizationSupported()
         * @see Preview.Builder#setPreviewStabilizationEnabled(boolean)
         */
        public @NonNull Builder<T> setVideoStabilizationEnabled(boolean enabled) {
            getMutableConfig().insertOption(OPTION_VIDEO_STABILIZATION_MODE,
                    enabled ? StabilizationMode.ON : StabilizationMode.OFF);
            return this;
        }

        @RestrictTo(Scope.LIBRARY_GROUP)
        @Override
        public @NonNull Builder<T> setCaptureType(
                UseCaseConfigFactory.@NonNull CaptureType captureType) {
            getMutableConfig().insertOption(OPTION_CAPTURE_TYPE, captureType);
            return this;
        }

        /**
         * Forces surface processing to be enabled.
         *
         * <p>Typically, surface processing is enabled automatically only when required for a
         * specific effect. Calling this method forces it to be enabled even if no effect
         * requires it. Surface processing adds an extra processing stage through the OpenGL
         * pipeline, which affects performance and memory usage. The camera service may also
         * treat the surface differently, potentially impacting video quality and stabilization,
         * so enabling it is generally not recommended.
         *
         * <p>One case where it can be useful is working around device compatibility issues. For
         * example, UHD video recording might not work on some devices, but enabling surface
         * processing could work around the issue.
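         *
         * <p>A minimal sketch of how this might be applied as a device-specific workaround; it
         * is illustrative only, and the {@code recorder} instance is assumed to be created
         * elsewhere:
         * <pre>{@code
         * VideoCapture<Recorder> videoCapture = new VideoCapture.Builder<>(recorder)
         *         .setSurfaceProcessingForceEnabled()
         *         .build();
         * }</pre>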
         */
        @RestrictTo(Scope.LIBRARY_GROUP)
        public @NonNull Builder<T> setSurfaceProcessingForceEnabled() {
            getMutableConfig().insertOption(OPTION_FORCE_ENABLE_SURFACE_PROCESSING, true);
            return this;
        }
    }
}