/*
 * Copyright 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package androidx.camera.effects;

import static androidx.camera.effects.internal.Utils.lockCanvas;

import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.util.Size;
import android.view.Surface;

import androidx.annotation.IntRange;
import androidx.annotation.RestrictTo;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageInfo;
import androidx.camera.core.Preview;
import androidx.camera.core.SurfaceRequest;

import com.google.auto.value.AutoValue;

import org.jspecify.annotations.NonNull;
import org.jspecify.annotations.Nullable;

/**
 * Represents a frame that is about to be rendered.
 *
 * <p>Use this class to draw an overlay on the camera output. It contains a {@link Canvas} for
 * the drawing. It also provides metadata for positioning the overlay correctly, including the
 * sensor-to-buffer transform, size, crop rect, rotation, mirroring, and timestamp.
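 *
 * <p>A minimal sketch of drawing a detection result on each frame. The callback mechanism
 * depends on the effect implementation (e.g. a per-frame draw listener such as
 * {@code OverlayEffect#setOnDrawListener}), and {@code detectedRectInSensor} and {@code paint}
 * are hypothetical app-side objects:
 *
 * <pre>{@code
 * boolean onDraw(Frame frame) {
 *     Canvas canvas = frame.getOverlayCanvas();
 *     // Map the canvas to the sensor coordinate system so results reported in sensor
 *     // coordinates can be drawn directly.
 *     canvas.setMatrix(frame.getSensorToBufferTransform());
 *     canvas.drawRect(detectedRectInSensor, paint);
 *     return true;
 * }
 * }</pre>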
 */
@AutoValue
public abstract class Frame {

    private @NonNull Surface mOverlaySurface;
    private @Nullable Canvas mOverlayCanvas;

    /**
     * Internal API to create a frame.
     */
    @RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
    public static @NonNull Frame of(
            @NonNull Surface overlaySurface,
            long timestampNanos,
            @NonNull Size size,
            SurfaceRequest.@NonNull TransformationInfo transformationInfo) {
        Frame frame = new AutoValue_Frame(transformationInfo.getSensorToBufferTransform(), size,
                transformationInfo.getCropRect(), transformationInfo.getRotationDegrees(),
                transformationInfo.isMirroring(), timestampNanos);
        frame.mOverlaySurface = overlaySurface;
        return frame;
    }

    /**
     * Returns the sensor to image buffer transform matrix.
     *
     * <p>The value is a mapping from sensor coordinates to buffer coordinates, that is, from
     * the rect of the camera sensor to the rect defined by {@code (0, 0, getSize().getWidth(),
     * getSize().getHeight())}.
     *
     * <p>The value can be set on the {@link Canvas} using the {@link Canvas#setMatrix} API. This
     * transforms the {@link Canvas} to the sensor coordinate system.
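     *
     * <p>Alternatively, the matrix can be used to map individual coordinates without modifying
     * the {@link Canvas}. A sketch, assuming {@code boundsInSensor} is a hypothetical detection
     * result reported in sensor coordinates:
     *
     * <pre>{@code
     * RectF boundsInBuffer = new RectF(boundsInSensor);
     * // Convert the sensor-coordinate rect into buffer (canvas) coordinates.
     * frame.getSensorToBufferTransform().mapRect(boundsInBuffer);
     * frame.getOverlayCanvas().drawRect(boundsInBuffer, paint);
     * }</pre>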
     *
     * @see SurfaceRequest.TransformationInfo#getSensorToBufferTransform()
     */
    public abstract @NonNull Matrix getSensorToBufferTransform();

    /**
     * Returns the resolution of the frame.
     *
     * <p>This is the size of the input {@link SurfaceTexture} created by the effect.
     *
     * @see SurfaceRequest#getResolution()
     */
    public abstract @NonNull Size getSize();

    /**
     * Returns the crop rect.
     *
     * <p>The value represents how CameraX intends to crop the input frame. The crop rect
     * specifies the region of valid pixels in the frame, using coordinates from (0, 0) to the
     * (width, height) of {@link #getSize()}. Only the overlay drawn within the bounds of the
     * crop rect will be visible to end users.
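     *
     * <p>For example, the overlay drawing can be clipped to the crop rect to avoid drawing into
     * the region that will be cropped away. A sketch, assuming no transform has been set on the
     * canvas (the crop rect is in buffer coordinates):
     *
     * <pre>{@code
     * Canvas canvas = frame.getOverlayCanvas();
     * // Anything drawn outside this rect would not be visible to end users anyway.
     * canvas.clipRect(frame.getCropRect());
     * }</pre>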
     *
     * <p>The crop rect is applied before rotating and mirroring. The order of the operations
     * is as follows: 1) cropping, 2) rotating and 3) mirroring.
     *
     * @see SurfaceRequest.TransformationInfo#getCropRect()
     */
    public abstract @NonNull Rect getCropRect();

    /**
     * Returns the rotation degrees of the frame.
     *
     * <p>This is a clockwise rotation in degrees that needs to be applied to the frame. The
     * rotation is determined by the camera sensor orientation and the UseCase configuration,
     * such as {@link Preview#setTargetRotation}. The app must draw the overlay according to
     * the rotation degrees to ensure it is displayed correctly to end users. For example, to
     * overlay text, make sure the text's orientation is aligned with the rotation degrees.
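     *
     * <p>One possible way to keep text upright is to counter-rotate the canvas before drawing.
     * This is only a sketch; {@code pivotX}, {@code pivotY} and {@code textPaint} are
     * hypothetical values chosen by the app:
     *
     * <pre>{@code
     * Canvas canvas = frame.getOverlayCanvas();
     * canvas.save();
     * // Pre-rotate the drawing in the opposite direction so that, after the pipeline rotates
     * // the frame clockwise by getRotationDegrees(), the text appears upright.
     * canvas.rotate(-frame.getRotationDegrees(), pivotX, pivotY);
     * canvas.drawText("Label", pivotX, pivotY, textPaint);
     * canvas.restore();
     * }</pre>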
     *
     * <p>The rotation is applied after cropping but before mirroring. The order of the
     * operations is as follows: 1) cropping, 2) rotating and 3) mirroring.
     *
     * @see SurfaceRequest.TransformationInfo#getRotationDegrees()
     */
    @IntRange(from = 0, to = 359)
    public abstract int getRotationDegrees();

    /**
     * Returns whether the buffer will be mirrored.
     *
     * <p>This flag indicates whether the buffer will be mirrored across the vertical
     * axis by the pipeline. For example, for a front camera preview, the buffer is usually
     * mirrored before being displayed to end users.
     *
     * <p>The mirroring is applied after cropping and rotating. The order of the
     * operations is as follows: 1) cropping, 2) rotating and 3) mirroring.
     *
     * @see SurfaceRequest.TransformationInfo#isMirroring()
     */
    public abstract boolean isMirroring();

    /**
     * Returns the timestamp of the frame in nanoseconds.
     *
     * <p>This value matches the timestamps of frames from other streams. For example, for an
     * {@link ImageAnalysis} output that originates from the same frame, this value matches the
     * value of {@link ImageInfo#getTimestamp()}.
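     *
     * <p>For example, the timestamp can be used to correlate analysis results with the frame
     * being drawn. A sketch, where {@code pendingResults}, {@code detectObjects} and
     * {@code Result} are hypothetical app-side names:
     *
     * <pre>{@code
     * // In the ImageAnalysis.Analyzer:
     * pendingResults.put(imageProxy.getImageInfo().getTimestamp(), detectObjects(imageProxy));
     *
     * // When drawing the overlay:
     * Result result = pendingResults.get(frame.getTimestampNanos());
     * }</pre>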
     *
     * @see SurfaceTexture#getTimestamp()
     * @see ImageInfo#getTimestamp()
     */
    public abstract long getTimestampNanos();

    /**
     * Gets the canvas for drawing the overlay.
     *
     * <p>Call this method to get the {@link Canvas} for drawing an overlay on top of the frame.
     * The {@link Canvas} is backed by a {@link SurfaceTexture} with a size equal to
     * {@link #getSize()}. To draw objects in camera sensor coordinates, apply
     * {@link #getSensorToBufferTransform()} via {@link Canvas#setMatrix(Matrix)} before drawing.
     *
     * <p>Using this method introduces wait times to synchronize frame updates. The caller should
     * only invoke this method when it needs to draw an overlay, for example, when an object is
     * detected in the frame.
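     *
     * <p>A sketch of drawing only when needed, where {@code hasResultFor} and
     * {@code drawResult} are hypothetical app-side helpers:
     *
     * <pre>{@code
     * if (hasResultFor(frame.getTimestampNanos())) {
     *     // Locking the canvas marks the overlay as dirty, so only do it when drawing.
     *     Canvas canvas = frame.getOverlayCanvas();
     *     // Clear what was drawn for previous frames before drawing the new result.
     *     canvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
     *     drawResult(canvas, frame);
     * }
     * }</pre>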
     */
    public @NonNull Canvas getOverlayCanvas() {
        if (mOverlayCanvas == null) {
            mOverlayCanvas = lockCanvas(mOverlaySurface);
        }
        return mOverlayCanvas;
    }

    /**
     * Internal API to check whether the overlay canvas has been drawn into.
     */
    @RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
    public boolean isOverlayDirty() {
        return mOverlayCanvas != null;
    }
}