/*
 * Copyright 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package androidx.camera.integration.core;

import static androidx.camera.core.impl.utils.TransformUtils.within360;

import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.opengl.Matrix;
import android.os.Process;
import android.util.Log;
import android.util.Size;
import android.view.Surface;

import androidx.annotation.IntDef;
import androidx.annotation.MainThread;
import androidx.annotation.WorkerThread;
import androidx.camera.core.DynamicRange;
import androidx.camera.core.Preview;
import androidx.concurrent.futures.CallbackToFutureAdapter;
import androidx.core.util.Consumer;
import androidx.core.util.Pair;

import com.google.common.util.concurrent.ListenableFuture;

import org.jspecify.annotations.NonNull;
import org.jspecify.annotations.Nullable;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.Locale;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.atomic.AtomicInteger;

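/**
 * Renders camera preview frames to an output {@link Surface} using OpenGL, with the EGL/GLES
 * work done in native code (opengl_renderer_jni) on a dedicated GL thread.
 *
 * <p>A rough usage sketch; {@code preview}, {@code supportedHdrRanges}, {@code outputSurface},
 * {@code size} and {@code rotation} are caller-supplied placeholders, not part of this class:
 * <pre>{@code
 * OpenGLRenderer renderer = new OpenGLRenderer(supportedHdrRanges);
 * renderer.attachInputPreview(preview);                         // camera frames in
 * renderer.attachOutputSurface(outputSurface, size, rotation);  // rendered frames out
 * // ...when finished, detach the output and release the GL context...
 * renderer.detachOutputSurface();
 * renderer.shutdown();
 * }</pre>
 */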
final class OpenGLRenderer {
    private static final String TAG = "OpenGLRenderer";
    private static final boolean DEBUG = false;

    static {
        System.loadLibrary("opengl_renderer_jni");
    }

    // -----------------------------------------------------------------------------------------//
    // Dynamic range encoding constants for renderer.
    // These need to be kept in sync with the equivalent constants in opengl_renderer_jni.cpp
    private static final int RENDER_DYN_RNG_SDR = DynamicRange.ENCODING_SDR;
    private static final int RENDER_DYN_RNG_HDR_HLG = DynamicRange.ENCODING_HLG;
    // TODO(b/303675500): Support HDR10/HDR10+ dynamic range
    // -----------------------------------------------------------------------------------------//

    @IntDef({RENDER_DYN_RNG_SDR, RENDER_DYN_RNG_HDR_HLG})
    @Retention(RetentionPolicy.SOURCE)
    public @interface RendererDynamicRange {
    }

    private static final AtomicInteger RENDERER_COUNT = new AtomicInteger(0);
    private final SingleThreadHandlerExecutor mExecutor =
            new SingleThreadHandlerExecutor(
                    String.format(Locale.US, "GLRenderer-%03d", RENDERER_COUNT.incrementAndGet()),
                    Process.THREAD_PRIORITY_DEFAULT); // Use UI thread priority (DEFAULT)

    private SurfaceTexture mPreviewTexture;
    private RectF mPreviewCropRect;
    private boolean mIsPreviewCropRectPrecalculated = false;
    private Size mPreviewSize;
    private int mTextureRotationDegrees;
    private int mSurfaceRequestRotationDegrees;
    private boolean mHasCameraTransform;
    // Transform retrieved by SurfaceTexture.getTransformMatrix
    private final float[] mTextureTransform = new float[16];

    // The model represents the surface we are drawing on. In 3D, it is a flat rectangle.
    private final float[] mModelTransform = new float[16];

    private final float[] mViewTransform = new float[16];

    private final float[] mProjectionTransform = new float[16];

    // A combination of the model, view and projection transform matrices.
    private final float[] mMvpTransform = new float[16];
    private boolean mMvpDirty = true;

    private Size mSurfaceSize = null;
    private int mSurfaceRotationDegrees = 0;

    private int mRendererDynamicRange = RENDER_DYN_RNG_SDR;
    private int mRendererBitDepth = 8;

    // Vectors defining the 'up' direction for the 4 angles we're interested in. These are based
    // on our world-space coordinate system (sensor coordinates), where the origin (0, 0) is in
    // the upper left of the image, and rotations are clockwise (left-handed coordinates).
    private static final float[] DIRECTION_UP_ROT_0 = {0f, -1f, 0f, 0f};
    private static final float[] DIRECTION_UP_ROT_90 = {1f, 0f, 0f, 0f};
    private static final float[] DIRECTION_UP_ROT_180 = {0f, 1f, 0f, 0f};
    private static final float[] DIRECTION_UP_ROT_270 = {-1f, 0f, 0f, 0f};
    private float[] mTempVec = new float[4];
    private float[] mTempMatrix = new float[32]; // 2 concatenated matrices for calculations

    private long mNativeContext = 0;

    private boolean mIsShutdown = false;
    private int mNumOutstandingSurfaces = 0;

    private Pair<Executor, Consumer<Long>> mFrameUpdateListener;

    private final Set<DynamicRange> mHighDynamicRangesSupportedByOutput;

    OpenGLRenderer(@NonNull Set<DynamicRange> highDynamicRangesSupportedByOutput) {
        mHighDynamicRangesSupportedByOutput = highDynamicRangesSupportedByOutput;
        // Initialize the GL context on the GL thread
        mExecutor.execute(() -> mNativeContext = initContext());
    }

    /**
     * Attach the Preview to the renderer.
     *
     * @param preview Preview use-case used in the renderer.
     * @return A {@link ListenableFuture} that signals the new surface is ready to be used in the
     * renderer for the input Preview use-case.
     */
    @MainThread
    @SuppressWarnings("ObjectToString")
    @NonNull ListenableFuture<Void> attachInputPreview(@NonNull Preview preview) {
        return CallbackToFutureAdapter.getFuture(completer -> {
            preview.setSurfaceProvider(
                    mExecutor,
                    surfaceRequest -> {
                        if (mIsShutdown) {
                            surfaceRequest.willNotProvideSurface();
                            return;
                        }

                        SurfaceTexture surfaceTexture = resetPreviewTexture(
                                surfaceRequest.getResolution());
                        Surface inputSurface = new Surface(surfaceTexture);
                        mNumOutstandingSurfaces++;

                        DynamicRange newDynamicRange = surfaceRequest.getDynamicRange();
                        int newRendererDynRng = RENDER_DYN_RNG_SDR;
                        int newRendererBitDepth = 8;
                        if (!Objects.equals(newDynamicRange, DynamicRange.SDR)) {
                            if (supportsHdr(mNativeContext)
                                    && outputSupportsDynamicRange(newDynamicRange)) {
                                if (newDynamicRange.getEncoding() == DynamicRange.ENCODING_HLG) {
                                    newRendererDynRng = RENDER_DYN_RNG_HDR_HLG;
                                }
                                // TODO(b/303675500): Support PQ for HDR10/HDR10+. In the case
                                //  where the output does not support the dynamic range, we may
                                //  need to do tonemapping in the future.
                                newRendererBitDepth = newDynamicRange.getBitDepth();
                            }
                        }

                        if (newRendererDynRng != mRendererDynamicRange
                                || newRendererBitDepth != mRendererBitDepth) {
                            mRendererDynamicRange = newRendererDynRng;
                            mRendererBitDepth = newRendererBitDepth;
                            updateRenderedDynamicRange(mNativeContext, mRendererDynamicRange,
                                    mRendererBitDepth);
                        }

                        surfaceRequest.setTransformationInfoListener(
                                mExecutor,
                                transformationInfo -> {
                                    mMvpDirty = true;
                                    mHasCameraTransform = transformationInfo.hasCameraTransform();
                                    mSurfaceRequestRotationDegrees =
                                            transformationInfo.getRotationDegrees();
                                    if (!isCropRectFullTexture(transformationInfo.getCropRect())) {
                                        // Crop rect is pre-calculated. Use it directly.
                                        mPreviewCropRect = new RectF(
                                                transformationInfo.getCropRect());
                                        mIsPreviewCropRectPrecalculated = true;
                                    } else {
                                        // Crop rect needs to be calculated before drawing.
                                        mPreviewCropRect = null;
                                        mIsPreviewCropRectPrecalculated = false;
                                    }
                                });

                        surfaceRequest.provideSurface(
                                inputSurface,
                                mExecutor,
                                result -> {
                                    inputSurface.release();
                                    surfaceTexture.release();
                                    if (surfaceTexture == mPreviewTexture) {
                                        mPreviewTexture = null;
                                    }
                                    mNumOutstandingSurfaces--;
                                    doShutdownExecutorIfNeeded();
                                });
                        // Make sure the renderer uses the new surface for the input Preview.
                        completer.set(null);
                    });
            return "attachInputPreview [" + this + "]";
        });
    }

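    /**
     * Returns true if the output is known to support the given dynamic range. SDR is always
     * considered supported; HDR ranges must be present in the set provided at construction.
     */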
    private boolean outputSupportsDynamicRange(DynamicRange newDynamicRange) {
        if (!Objects.equals(newDynamicRange, DynamicRange.SDR)) {
            return mHighDynamicRangesSupportedByOutput.contains(newDynamicRange);
        }
        // SDR is always supported
        return true;
    }

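    /**
     * Attaches the output surface that rendered frames are drawn to.
     *
     * @param surface                Surface the processed frames will be rendered into.
     * @param surfaceSize            Size of the output surface.
     * @param surfaceRotationDegrees Rotation of the output surface relative to the device's
     *                               natural orientation, in degrees.
     */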
    void attachOutputSurface(
            @NonNull Surface surface, @NonNull Size surfaceSize, int surfaceRotationDegrees) {
        try {
            mExecutor.execute(
                    () -> {
                        if (mIsShutdown) {
                            return;
                        }

                        if (setWindowSurface(mNativeContext, surface, mRendererDynamicRange)) {
                            if (surfaceRotationDegrees != mSurfaceRotationDegrees
                                    || !Objects.equals(surfaceSize, mSurfaceSize)) {
                                mMvpDirty = true;
                            }
                            mSurfaceRotationDegrees = surfaceRotationDegrees;
                            mSurfaceSize = surfaceSize;
                        } else {
                            mSurfaceSize = null;
                        }
                    });
        } catch (RejectedExecutionException e) {
            // Renderer is shutting down. Ignore.
        }
    }

    /**
     * Sets a listener to receive updates when a frame has been drawn to the output {@link Surface}.
     *
     * <p>Frame updates include the timestamp of the latest drawn frame.
     *
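     * <p>A minimal sketch of attaching a listener; {@code renderer} and {@code context} are
     * caller-supplied, and the main executor is just one reasonable choice:
     * <pre>{@code
     * renderer.setFrameUpdateListener(
     *         ContextCompat.getMainExecutor(context),
     *         timestampNs -> Log.d(TAG, "Frame drawn at " + timestampNs + " ns"));
     * }</pre>
     *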
     * @param executor Executor used to call the listener.
     * @param listener Listener which receives updates in the form of a timestamp (in nanoseconds).
     */
    void setFrameUpdateListener(@NonNull Executor executor, @NonNull Consumer<Long> listener) {
        try {
            mExecutor.execute(() -> mFrameUpdateListener = new Pair<>(executor, listener));
        } catch (RejectedExecutionException e) {
            // Renderer is shutting down. Ignore.
        }
    }

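    /** Removes any frame update listener previously set by {@link #setFrameUpdateListener}. */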
    void clearFrameUpdateListener() {
        try {
            mExecutor.execute(() -> mFrameUpdateListener = null);
        } catch (RejectedExecutionException e) {
            // Renderer is shutting down. Ignore.
        }
    }

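    /**
     * Updates the rotation of the output surface and redraws the latest frame, if a preview
     * texture is available.
     *
     * @param surfaceRotationDegrees Rotation of the output surface relative to the device's
     *                               natural orientation, in degrees.
     */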
    void invalidateSurface(int surfaceRotationDegrees) {
        try {
            mExecutor.execute(
                    () -> {
                        if (surfaceRotationDegrees != mSurfaceRotationDegrees) {
                            mMvpDirty = true;
                        }
                        mSurfaceRotationDegrees = surfaceRotationDegrees;
                        if (mPreviewTexture != null && !mIsShutdown) {
                            renderLatest();
                        }
                    });
        } catch (RejectedExecutionException e) {
            // Renderer is shutting down. Ignore.
        }
    }

    /**
     * Detaches the current output surface from the renderer.
     *
     * @return A {@link ListenableFuture} that signals the surface has been detached from the
     * renderer. Some devices may not be able to handle the surface being released while still
     * attached to an EGL context. It should be safe to release resources associated with the
     * output surface once this future has completed.
     */
    @SuppressWarnings("ObjectToString")
    ListenableFuture<Void> detachOutputSurface() {
        return CallbackToFutureAdapter.getFuture(completer -> {
            try {
                mExecutor.execute(
                        () -> {
                            if (!mIsShutdown) {
                                setWindowSurface(mNativeContext, null, RENDER_DYN_RNG_SDR);
                                mSurfaceSize = null;
                            }
                            completer.set(null);
                        });
            } catch (RejectedExecutionException e) {
                // Renderer is shutting down. Can notify that the surface is detached.
                completer.set(null);
            }
            return "detachOutputSurface [" + this + "]";
        });
    }

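    /**
     * Shuts down the renderer, closing the native GL context. The GL thread is shut down once
     * all outstanding surfaces have been released.
     */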
    void shutdown() {
        try {
            mExecutor.execute(
                    () -> {
                        if (!mIsShutdown) {
                            closeContext(mNativeContext);
                            mNativeContext = 0;
                            mIsShutdown = true;
                        }
                        doShutdownExecutorIfNeeded();
                    });
        } catch (RejectedExecutionException e) {
            // Renderer already shutting down. Ignore.
        }
    }

    @WorkerThread
    private void doShutdownExecutorIfNeeded() {
        if (mIsShutdown && mNumOutstandingSurfaces == 0) {
            mFrameUpdateListener = null;
            mExecutor.shutdown();
        }
    }

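    /**
     * Creates a new {@link SurfaceTexture} bound to the renderer's GL texture, detaching any
     * previous one, and registers a listener that renders each newly available frame on the GL
     * thread.
     */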
    @WorkerThread
    private @NonNull SurfaceTexture resetPreviewTexture(@NonNull Size size) {
        if (mPreviewTexture != null) {
            mPreviewTexture.detachFromGLContext();
        }

        mPreviewTexture = new SurfaceTexture(getTexName(mNativeContext));
        mPreviewTexture.setDefaultBufferSize(size.getWidth(), size.getHeight());
        mPreviewTexture.setOnFrameAvailableListener(
                surfaceTexture -> {
                    if (surfaceTexture == mPreviewTexture && !mIsShutdown) {
                        surfaceTexture.updateTexImage();
                        renderLatest();
                    }
                },
                mExecutor.getHandler());
        if (!Objects.equals(size, mPreviewSize)) {
            mMvpDirty = true;
        }
        mPreviewSize = size;
        return mPreviewTexture;
    }

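    /**
     * Draws the most recently updated texture image to the output surface, recomputing the MVP
     * transform if any of its inputs have changed, and notifies the frame update listener on a
     * successful render.
     */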
    @WorkerThread
    private void renderLatest() {
        // Get the timestamp so we can pass it along to the output surface (not strictly necessary)
        long timestampNs = mPreviewTexture.getTimestamp();

        // Get texture transform from surface texture (transform to natural orientation).
        // This will be used to transform texture coordinates in the fragment shader.
        mPreviewTexture.getTransformMatrix(mTextureTransform);
        // Check whether the texture's rotation has changed so we can update the MVP matrix.
        int textureRotationDegrees = getTextureRotationDegrees();
        if (textureRotationDegrees != mTextureRotationDegrees) {
            mMvpDirty = true;
        }

        mTextureRotationDegrees = textureRotationDegrees;
        if (mSurfaceSize != null) {
            if (mMvpDirty) {
                updateMvpTransform();
            }
            boolean success = renderTexture(mNativeContext, timestampNs, mMvpTransform, mMvpDirty,
                    mTextureTransform);
            mMvpDirty = false;
            if (success && mFrameUpdateListener != null) {
                Executor executor = Objects.requireNonNull(mFrameUpdateListener.first);
                Consumer<Long> listener = Objects.requireNonNull(mFrameUpdateListener.second);
                try {
                    executor.execute(() -> listener.accept(timestampNs));
                } catch (RejectedExecutionException e) {
                    // Unable to send frame update. Ignore.
                }
            }
        }
    }

    /**
     * Calculates the rotation of the source texture between the sensor coordinate space and
     * the device's 'natural' orientation.
     *
     * <p>A required transform matrix is passed along with each texture update and is retrieved by
     * {@link SurfaceTexture#getTransformMatrix(float[])}.
     *
     * <pre>{@code
     *        TEXTURE FROM SENSOR:
     * ^
     * |                  +-----------+
     * |          .#######|###        |
     * |           *******|***        |
     * |   ....###########|## ####. / |         Sensor may be rotated relative
     * |  ################|## #( )#.  |         to the device's 'natural'
     * |       ###########|## ######  |         orientation.
     * |  ################|## #( )#*  |
     * |   ****###########|## ####* \ |
     * |           .......|...        |
     * |          *#######|###        |
     * |                  +-----------+
     * +-------------------------------->
     *                                               TRANSFORMED IMAGE:
     *                 | |                   ^
     *                 | |                   |         .            .
     *                 | |                   |         \\ ........ //
     *   Transform matrix from               |         ##############
     *   SurfaceTexture#getTransformMatrix() |       ###(  )####(  )###
     *   performs scale/crop/rotate on       |      ####################
     *   image from sensor to produce        |     ######################
     *   image in 'natural' orientation.     | ..  ......................  ..
     *                 | |                   |#### ###################### ####
     *                 | +-------\           |#### ###################### ####
     *                 +---------/           |#### ###################### ####
     *                                       +-------------------------------->
     * }</pre>
     *
     * <p>The transform matrix is a 4x4 affine transform matrix that operates on standard normalized
     * texture coordinates which are in the range of [0,1] for both s and t dimensions. Before
     * the transform is applied, the texture may have dimensions that are larger than the
     * dimensions of the SurfaceTexture we provided in order to accommodate hardware limitations.
     *
     * <p>For this method we are only interested in the rotation component of the transform
     * matrix, so the calculations avoid the scaling and translation components.
     */
    @WorkerThread
    private int getTextureRotationDegrees() {
        // The final output image should have the requested dimensions AFTER applying the
        // transform matrix, but width and height may be swapped. We know that the transform
        // matrix from SurfaceTexture#getTransformMatrix() is an affine transform matrix that
        // will only rotate in 90 degree increments, so we only need to worry about the rotation
        // component.
        //
        // We can test this by using a test vector of [s, t, p, q] = [0, 1, 0, 0]. Using 'q = 0'
        // will ignore the translation component of the matrix. We will only need to check if the
        // 's' component becomes a scaled version of the 't' component and the 't' component
        // becomes 0.
        Matrix.multiplyMV(mTempVec, 0, mTextureTransform, 0, DIRECTION_UP_ROT_0, 0);

        // Normalize the vector and round its components to integers so we can do integer
        // comparison. Normalizing removes the effects of the scaling component of the
        // transform matrix.
        float length = Matrix.length(mTempVec[0], mTempVec[1], 0);
        int s = Math.round(mTempVec[0] / length);
        int t = Math.round(mTempVec[1] / length);
        if (s == 0 && t == 1) {
            //       (0,1)                               (0,1)
            //    +----^----+          0 deg          +----^----+
            //    |    |    |        Rotation         |    |    |
            //    |    +    |         +----->         |    +    |
            //    |  (0,0)  |                         |  (0,0)  |
            //    +---------+                         +---------+
            return 0;
        } else if (s == 1 && t == 0) {
            //       (0,1)
            //    +----^----+         90 deg          +---------+
            //    |    |    |        Rotation         |         |
            //    |    +    |         +----->         |    +---->(1,0)
            //    |  (0,0)  |                         |  (0,0)  |
            //    +---------+                         +---------+
            return 90;
        } else if (s == 0 && t == -1) {
            //       (0,1)
            //    +----^----+         180 deg         +---------+
            //    |    |    |        Rotation         |  (0,0)  |
            //    |    +    |         +----->         |    +    |
            //    |  (0,0)  |                         |    |    |
            //    +---------+                         +----v----+
            //                                           (0,-1)
            return 180;
        } else if (s == -1 && t == 0) {
            //       (0,1)
            //    +----^----+         270 deg         +---------+
            //    |    |    |        Rotation         |         |
            //    |    +    |         +----->   (-1,0)<----+    |
            //    |  (0,0)  |                         |  (0,0)  |
            //    +---------+                         +---------+
            return 270;
        }

        throw new RuntimeException(String.format("Unexpected texture transform matrix. Expected "
                + "test vector [0, 1] to rotate to [0, 1], [1, 0], [0, -1] or [-1, 0], but instead "
                + "was [%d, %d].", s, t));
    }

    /**
     * Returns true if the crop rect dimensions match the entire texture dimensions.
     */
    @WorkerThread
    private boolean isCropRectFullTexture(@NonNull Rect cropRect) {
        return cropRect.left == 0 && cropRect.top == 0
                && cropRect.width() == mPreviewSize.getWidth()
                && cropRect.height() == mPreviewSize.getHeight();
    }

    /**
     * Derives the model crop rect from the texture and output surface dimensions, applying a
     * 'center-crop' transform.
     *
     * <p>Because the camera sensor (or crop of the camera sensor) may have a different
     * aspect ratio than the ViewPort that is meant to display it, we want to fit the image
     * from the camera so the entire ViewPort is filled. This generally requires scaling the input
     * texture and cropping pixels from either the width or height. We call this transform
     * 'center-crop'; it is equivalent to {@link android.widget.ImageView.ScaleType#CENTER_CROP}.
     */
    @WorkerThread
    private void extractPreviewCropFromPreviewSizeAndSurface() {
        // Swap the dimensions of the surface we are drawing the texture onto if rotating the
        // texture to the surface orientation requires a 90 degree or 270 degree rotation.
        int viewPortRotation = getViewPortRotation();
        if (viewPortRotation == 90 || viewPortRotation == 270) {
            // Width and height swapped
            mPreviewCropRect = new RectF(0, 0, mSurfaceSize.getHeight(), mSurfaceSize.getWidth());
        } else {
            mPreviewCropRect = new RectF(0, 0, mSurfaceSize.getWidth(), mSurfaceSize.getHeight());
        }

        android.graphics.Matrix centerCropMatrix = new android.graphics.Matrix();
        RectF previewSize = new RectF(0, 0, mPreviewSize.getWidth(), mPreviewSize.getHeight());
        centerCropMatrix.setRectToRect(mPreviewCropRect, previewSize,
                android.graphics.Matrix.ScaleToFit.CENTER);
        centerCropMatrix.mapRect(mPreviewCropRect);
    }

    /**
     * Returns the relative rotation between the sensor coordinates and the ViewPort in
     * world-space coordinates.
     *
     * <p>This is the angle the sensor needs to be rotated, clockwise, in order to be upright in
     * the viewport coordinates.
     */
    @WorkerThread
    private int getViewPortRotation() {
        // Note that since the rotations defined by Surface#ROTATION_*** are positive when the
        // device is rotated in a counter-clockwise direction and our world-space coordinates
        // define positive angles in the clockwise direction, we add the two together to get the
        // total angle required.
        if (mHasCameraTransform) {
            // If the Surface is connected to the camera, there is surface rotation encoded in
            // the SurfaceTexture.
            return within360((180 - mTextureRotationDegrees) + mSurfaceRotationDegrees);
        } else {
            // When the Surface is connected to an internal OpenGL renderer, the texture rotation
            // is always 0. Use the rotation provided by SurfaceRequest instead.
            return mSurfaceRequestRotationDegrees;
        }
    }

    /**
     * Updates the matrix used to transform the model into the correct dimensions within the
     * world-space.
     *
     * <p>In order to draw the camera frames to screen, we use a flat rectangle in our
     * world-coordinate space. The world coordinates match the preview buffer coordinates with
     * the origin (0,0) in the upper left corner of the image. Defining the world space in this
     * way allows subsequent models to be positioned according to buffer coordinates.
     * Note this differs from standard OpenGL coordinates; this is a left-handed coordinate
     * system, and requires using glFrontFace(GL_CW) before drawing.
     * <pre>{@code
     *             Standard coordinates:                   Our coordinate system:
     *
     *                      | +y                                  ________+x
     *                      |                                   /|
     *                      |                                  / |
     *                      |________+x                     +z/  |
     *                     /                                     | +y
     *                    /
     *                   /+z
     * }</pre>
     * <p>Our model is initially a square with vertices ranging from (-1,-1) to (1,1). It is
     * rotated, scaled and translated to match the dimensions of the preview, with the origin in
     * the upper left corner.
     *
     * <p>Example for a preview with dimensions 1920x1080:
     * <pre>{@code
     *                (-1,-1)    (1,-1)
     *                   +---------+        Model
     *                   |         |        Transform          (0,0)         (1920,0)
     * Unscaled Model -> |    +    |         ---\                +----------------+
     *                   |         |         ---/                |                |      Scaled/
     *                   +---------+                             |                | <-- Translated
     *                (-1,1)     (1,1)                           |                |       Model
     *                                                           +----------------+
     *                                                         (0,1080)      (1920,1080)
     * }</pre>
     */
    @WorkerThread
    private void updateModelTransform() {
        // Remove the rotation to the device 'natural' orientation so our world space will be in
        // sensor coordinates.
        Matrix.setRotateM(mTempMatrix, 0, -(180 - mTextureRotationDegrees), 0.0f, 0.0f, 1.0f);

        Matrix.setIdentityM(mTempMatrix, 16);
        // Translate to the upper left corner of the quad so we are in buffer space
        Matrix.translateM(mTempMatrix, 16, mPreviewSize.getWidth() / 2f,
                mPreviewSize.getHeight() / 2f, 0);
        // Scale the vertices so that our world space units are pixels equal in size to the
        // pixels of the buffer sent from the camera.
        Matrix.scaleM(mTempMatrix, 16, mPreviewSize.getWidth() / 2f, mPreviewSize.getHeight() / 2f,
                1f);
        Matrix.multiplyMM(mModelTransform, 0, mTempMatrix, 16, mTempMatrix, 0);
        if (DEBUG) {
            printMatrix("ModelTransform", mModelTransform, 0);
        }
    }

    /**
     * The view transform defines the position and orientation of the camera within our world-space.
     *
     * <p>This brings us from world-space coordinates to view (camera) space.
     *
     * <p>This matrix is defined by a camera position, a gaze point, and a vector that represents
     * the "up" direction. Because we are using an orthogonal projection, we always place the
     * camera directly in front of the gaze point and 1 unit away on the z-axis for convenience.
     * We have defined our world coordinates in a way where we will be looking at the front of
     * the model rectangle if our camera is placed on the positive z-axis and we gaze towards
     * the negative z-axis.
     */
    @WorkerThread
    private void updateViewTransform() {
        // Apply the rotation of the ViewPort and look at the center of the image
        float[] upVec = DIRECTION_UP_ROT_0;
        switch (getViewPortRotation()) {
            case 0:
                upVec = DIRECTION_UP_ROT_0;
                break;
            case 90:
                upVec = DIRECTION_UP_ROT_90;
                break;
            case 180:
                upVec = DIRECTION_UP_ROT_180;
                break;
            case 270:
                upVec = DIRECTION_UP_ROT_270;
                break;
        }
        Matrix.setLookAtM(mViewTransform, 0,
                mPreviewCropRect.centerX(), mPreviewCropRect.centerY(), 1, // Camera position
                mPreviewCropRect.centerX(), mPreviewCropRect.centerY(), 0, // Point to look at
                upVec[0], upVec[1], upVec[2] // Up direction
        );
        if (DEBUG) {
            printMatrix("ViewTransform", mViewTransform, 0);
        }
    }

    /**
     * The projection matrix will map from the view space to normalized device coordinates (NDC)
     * which OpenGL is expecting.
     *
     * <p>Our view is meant to only show the pixels defined by the model crop rect, so our
     * orthogonal projection matrix will depend on the preview crop rect dimensions.
     *
     * <p>The projection matrix can be thought of as a cube which has sides that align with the
     * edges of the ViewPort and the near/far sides can be adjusted as needed. In our case, we
     * set the near side to match the camera position and the far side to match the model's
     * position on the z-axis, 1 unit away.
     */
    @WorkerThread
    private void updateProjectionTransform() {
        float viewPortWidth = mPreviewCropRect.width();
        float viewPortHeight = mPreviewCropRect.height();
        // Since projection occurs after rotation of the camera, in order to map directly to model
        // coordinates we need to take into account the surface rotation.
        int viewPortRotation = getViewPortRotation();
        if (viewPortRotation == 90 || viewPortRotation == 270) {
            viewPortWidth = mPreviewCropRect.height();
            viewPortHeight = mPreviewCropRect.width();
        }

        Matrix.orthoM(mProjectionTransform, 0,
                /*left=*/-viewPortWidth / 2f, /*right=*/viewPortWidth / 2f,
                /*bottom=*/viewPortHeight / 2f, /*top=*/-viewPortHeight / 2f,
                /*near=*/0, /*far=*/1);
        if (DEBUG) {
            printMatrix("ProjectionTransform", mProjectionTransform, 0);
        }
    }

    /**
     * The MVP is the combination of model, view and projection transforms that take us from the
     * world space to normalized device coordinates (NDC) which OpenGL uses to display images
     * with the correct dimensions on an EGL surface.
     */
    @WorkerThread
    private void updateMvpTransform() {
        if (!mIsPreviewCropRectPrecalculated) {
            extractPreviewCropFromPreviewSizeAndSurface();
        }

        if (DEBUG) {
            Log.d(TAG, String.format("Model dimensions: %s, Crop rect: %s", mPreviewSize,
                    mPreviewCropRect));
        }

        updateModelTransform();
        updateViewTransform();
        updateProjectionTransform();

        Matrix.multiplyMM(mTempMatrix, 0, mViewTransform, 0, mModelTransform, 0);

        if (DEBUG) {
            // Print the model-view matrix (without projection)
            printMatrix("MVTransform", mTempMatrix, 0);
        }

        Matrix.multiplyMM(mMvpTransform, 0, mProjectionTransform, 0, mTempMatrix, 0);
        if (DEBUG) {
            printMatrix("MVPTransform", mMvpTransform, 0);
        }
    }

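    /**
     * Logs a 4x4 matrix stored in column-major order (the {@link android.opengl.Matrix}
     * convention) in conventional row-major layout for readability.
     */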
    private static void printMatrix(String label, float[] matrix, int offset) {
        Log.d(TAG, String.format("%s:\n"
                        + "%.4f %.4f %.4f %.4f\n"
                        + "%.4f %.4f %.4f %.4f\n"
                        + "%.4f %.4f %.4f %.4f\n"
                        + "%.4f %.4f %.4f %.4f\n", label,
                matrix[offset], matrix[offset + 4], matrix[offset + 8], matrix[offset + 12],
                matrix[offset + 1], matrix[offset + 5], matrix[offset + 9], matrix[offset + 13],
                matrix[offset + 2], matrix[offset + 6], matrix[offset + 10], matrix[offset + 14],
                matrix[offset + 3], matrix[offset + 7], matrix[offset + 11], matrix[offset + 15]));
    }

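    // Native methods implemented in opengl_renderer_jni.cpp. All of them must be called on the
    // GL thread backed by mExecutor.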
    @WorkerThread
    private static native long initContext();

    @WorkerThread
    private static native boolean setWindowSurface(long nativeContext, @Nullable Surface surface,
            @RendererDynamicRange int dynamicRange);

    @WorkerThread
    private static native int getTexName(long nativeContext);

    @WorkerThread
    private static native boolean renderTexture(
            long nativeContext,
            long timestampNs,
            float @NonNull [] mvpTransform,
            boolean mvpDirty,
            float @NonNull [] textureTransform);

    @WorkerThread
    private static native void closeContext(long nativeContext);

    @WorkerThread
    private static native void updateRenderedDynamicRange(long nativeContext,
            @RendererDynamicRange int dynamicRange, int bitDepth);

    @WorkerThread
    private static native boolean supportsHdr(long nativeContext);
}