/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.DeviceAsWebcam;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.HardwareBuffer;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.SessionConfiguration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.hardware.display.DisplayManager;
import android.media.Image;
import android.media.ImageReader;
import android.media.ImageWriter;
import android.os.ConditionVariable;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.ArrayMap;
import android.util.Log;
import android.util.Range;
import android.util.Rational;
import android.util.Size;
import android.view.Display;
import android.view.Surface;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;

import com.android.DeviceAsWebcam.utils.UserPrefs;

import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;

/**
 * This class controls the operation of the camera, primarily through the public calls
 * - startPreviewStreaming
 * - startWebcamStreaming
 * - stopPreviewStreaming
 * - stopWebcamStreaming
 * These calls do what they suggest - that is, start / stop the preview and webcam streams. They
 * internally book-keep whether a preview stream needs to be started alongside a webcam stream or
 * by itself, and vice versa.
 * For the webcam stream, it delegates interaction with the native service code - used for
 * encoding ImageReader image callbacks - to the foreground service (it stores a weak reference
 * to the foreground service during construction).
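 *
 * <p>A minimal usage sketch (hypothetical caller, not part of this class), assuming the
 * foreground service is already running and the USB host has negotiated an MJPEG 1080p30
 * stream (the concrete values are illustrative):
 * <pre>{@code
 * CameraController controller =
 *         new CameraController(context, new WeakReference<>(fgService));
 * controller.setWebcamStreamConfig(true, 1920, 1080, 30); // mjpeg, 1080p, 30 fps
 * controller.startWebcamStreaming();
 * // ... frames are encoded via the foreground service until the host stops the stream ...
 * controller.stopWebcamStreaming();
 * }</pre>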
 */
public class CameraController {
    private static final String TAG = "CameraController";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);

    // Camera session state - when camera is actually being used
    enum CameraStreamingState {
        NO_STREAMING,
        WEBCAM_STREAMING,
        PREVIEW_STREAMING,
        PREVIEW_AND_WEBCAM_STREAMING
    };

    // Camera availability states
    enum CameraAvailabilityState {
        AVAILABLE,
        UNAVAILABLE
    };

    private static final int MAX_BUFFERS = 4;
    // The ratio to the active array size that will be used to determine the metering rectangle
    // size.
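    // For example, with a 4000x3000 active array a 0.15 ratio gives a metering rectangle of
    // roughly 600x450 pixels (illustrative numbers; the exact placement is done by the
    // tap-to-focus logic).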
    private static final float METERING_RECTANGLE_SIZE_RATIO = 0.15f;

    @Nullable
    private CameraId mBackCameraId = null;
    @Nullable
    private CameraId mFrontCameraId = null;

    private ImageReader mImgReader;
    private Object mImgReaderLock = new Object();
    private ImageWriter mImageWriter;

    // current camera session state
    private CameraStreamingState mCurrentState = CameraStreamingState.NO_STREAMING;

    // current camera availability state - to be accessed only from camera related callbacks which
    // execute on mCameraCallbacksExecutor. This isn't a part of mCameraInfo since that is static
    // information about a camera and has looser thread access requirements.
    private ArrayMap<String, CameraAvailabilityState> mCameraAvailabilityState = new ArrayMap<>();

    private Context mContext;
    private WeakReference<DeviceAsWebcamFgService> mServiceWeak;
    private CaptureRequest.Builder mPreviewRequestBuilder;
    private CameraManager mCameraManager;
    private CameraDevice mCameraDevice;
    private Handler mImageReaderHandler;
    private Executor mCameraCallbacksExecutor;
    private Executor mServiceEventsExecutor;
    private SurfaceTexture mPreviewSurfaceTexture;
    /**
     * Registered by the Preview Activity, and called by CameraController when preview size changes
     * as a result of the webcam stream changing.
     */
    private Consumer<Size> mPreviewSizeChangeListener;
    private Surface mPreviewSurface;
    private Size mDisplaySize;
    private Size mPreviewSize;
    // Executor for ImageWriter thread - used when camera is evicted and webcam is streaming.
    private ScheduledExecutorService mImageWriterEventsExecutor;

    // This is set up only when we need to show the camera access blocked logo and is reset
    // when the camera is available again - since it's going to be a rare occurrence that the
    // camera is actually evicted while the webcam is streaming.
    private byte[] mCombinedBitmapBytes;

    private OutputConfiguration mPreviewOutputConfiguration;
    private OutputConfiguration mWebcamOutputConfiguration;
    private List<OutputConfiguration> mOutputConfigurations;
    private CameraCaptureSession mCaptureSession;
    private ConditionVariable mReadyToStream = new ConditionVariable();
    private ConditionVariable mCaptureSessionReady = new ConditionVariable();
    private AtomicBoolean mStartCaptureWebcamStream = new AtomicBoolean(false);
    private final Object mSerializationLock = new Object();
    // timestamp -> Image
    private ConcurrentHashMap<Long, ImageAndBuffer> mImageMap = new ConcurrentHashMap<>();
    private List<CameraId> mAvailableCameraIds = new ArrayList<>();
    @Nullable
    private CameraId mCameraId = null;
    private ArrayMap<CameraId, CameraInfo> mCameraInfoMap = new ArrayMap<>();
    @Nullable
    private float[] mTapToFocusPoints = null;
    private static class StreamConfigs {
        StreamConfigs(boolean mjpegP, int widthP, int heightP, int fpsP) {
            isMjpeg = mjpegP;
            width = widthP;
            height = heightP;
            fps = fpsP;
        }

        boolean isMjpeg;
        int width;
        int height;
        int fps;
    };
    private StreamConfigs mStreamConfigs;
    private CameraDevice.StateCallback mCameraStateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(@NonNull CameraDevice cameraDevice) {
            if (VERBOSE) {
                Log.v(TAG, "Camera device opened, creating capture session now");
            }
            mCameraDevice = cameraDevice;
            mReadyToStream.open();
        }

        @Override
        public void onDisconnected(CameraDevice cameraDevice) {
            if (VERBOSE) {
                Log.v(TAG, "onDisconnected: " + cameraDevice.getId() +
                        " camera available state " +
                        mCameraAvailabilityState.get(cameraDevice.getId()));
            }
            handleDisconnected();
        }

        private void handleDisconnected() {
            mServiceEventsExecutor.execute(() -> {
                synchronized (mSerializationLock) {
                    mCameraDevice = null;
                    stopStreamingAltogetherLocked(/*closeImageReader*/false);
                    if (mStartCaptureWebcamStream.get()) {
                        startShowingCameraUnavailableLogo();
                    }
                }
            });
        }
        @Override
        public void onError(@NonNull CameraDevice cameraDevice, int error) {
            if (VERBOSE) {
                Log.e(TAG, "Camera id  " + cameraDevice.getId() + ": onError " + error);
            }
            mReadyToStream.open();
            if (mStartCaptureWebcamStream.get()) {
                startShowingCameraUnavailableLogo();
            }
        }
    };
    private CameraCaptureSession.CaptureCallback mCaptureCallback =
            new CameraCaptureSession.CaptureCallback() {};

    private CameraCaptureSession.StateCallback mCameraCaptureSessionCallback =
            new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                    if (mCameraDevice == null) {
                        return;
                    }
                    mCaptureSession = cameraCaptureSession;
                    try {
                        mCaptureSession.setSingleRepeatingRequest(
                                mPreviewRequestBuilder.build(), mCameraCallbacksExecutor,
                                mCaptureCallback);
                    } catch (CameraAccessException e) {
                        Log.e(TAG, "setSingleRepeatingRequest failed", e);
                    }
                    mCaptureSessionReady.open();
                }

                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession captureSession) {
                    Log.e(TAG, "Failed to configure CameraCaptureSession");
                }
            };

    private CameraManager.AvailabilityCallback mCameraAvailabilityCallbacks =
            new CameraManager.AvailabilityCallback() {
        @Override
        public void onCameraAvailable(String cameraId) {
            mCameraAvailabilityState.put(cameraId, CameraAvailabilityState.AVAILABLE);
            if (VERBOSE) {
                Log.v(TAG, "onCameraAvailable: " + cameraId);
            }
            // We want to attempt to start webcam streaming when :
            // webcam was already streaming and the camera that was streaming became available.
            // The attempt to start streaming the camera may succeed or fail. If it fails,
            // (for example: if the camera is available but another client is using a camera which
            // cannot be opened concurrently with mCameraId), it'll be handled by the onError
            // callback.
            if (mStartCaptureWebcamStream.get() &&
                    mCameraAvailabilityState.get(mCameraId.mainCameraId) ==
                            CameraAvailabilityState.AVAILABLE) {
                if (VERBOSE) {
                    Log.v(TAG, "Camera available : try starting webcam stream for camera id "
                            + mCameraId.mainCameraId);
                }
                handleOnCameraAvailable();
            }

        }

        @Override
        public void onCameraUnavailable(String cameraId) {
            // We're unconditionally waiting for available - mStartCaptureWebcamStream will decide
            // whether we need to do anything about it.
            if (VERBOSE) {
                Log.v(TAG, "Camera id " + cameraId + " unavailable");
            }
            mCameraAvailabilityState.put(cameraId, CameraAvailabilityState.UNAVAILABLE);
        }
    };

    private ImageReader.OnImageAvailableListener mOnImageAvailableListener =
            new ImageReader.OnImageAvailableListener() {
                @Override
                public void onImageAvailable(ImageReader reader) {
                    Image image;
                    HardwareBuffer hardwareBuffer;
                    long ts;
                    DeviceAsWebcamFgService service = mServiceWeak.get();
                    synchronized (mImgReaderLock) {
                        if (reader != mImgReader) {
                            return;
                        }
                        if (service == null) {
                            Log.e(TAG, "Service is dead, what ?");
                            return;
                        }
                        if (mImageMap.size() >= MAX_BUFFERS) {
                            Log.w(TAG, "Too many buffers acquired in onImageAvailable, returning");
                            return;
                        }
                        // Get native HardwareBuffer from the next image (we should never
                        // accumulate images since we're not doing any compute work on the
                        // imageReader thread) and
                        // send it to the native layer for the encoder to process.
                        // Acquire latest Image and get the HardwareBuffer
                        image = reader.acquireNextImage();
                        if (VERBOSE) {
                            Log.v(TAG, "Got acquired Image in onImageAvailable callback for reader "
                                    + reader);
                        }
                        if (image == null) {
                            if (VERBOSE) {
                                Log.e(TAG, "More images than MAX acquired ?");
                            }
                            return;
                        }
                        ts = image.getTimestamp();
                        hardwareBuffer = image.getHardwareBuffer();
                    }
                    mImageMap.put(ts, new ImageAndBuffer(image, hardwareBuffer));
                    // Callback into DeviceAsWebcamFgService to encode image
                    if ((!mStartCaptureWebcamStream.get()) || (service.nativeEncodeImage(
                            hardwareBuffer, ts, getCurrentRotation()) != 0)) {
                        if (VERBOSE) {
                            Log.v(TAG,
                                    "Couldn't get buffer immediately, returning image. "
                                            + "acquired size "
                                            + mImageMap.size());
                        }
                        returnImage(ts);
                    }
                }
            };

    private volatile float mZoomRatio;
    private RotationProvider mRotationProvider;
    private RotationUpdateListener mRotationUpdateListener = null;
    private CameraInfo mCameraInfo = null;
    private UserPrefs mUserPrefs;
    VendorCameraPrefs mRroCameraInfo;

    public CameraController(Context context, WeakReference<DeviceAsWebcamFgService> serviceWeak) {
        mContext = context;
        mServiceWeak = serviceWeak;
        if (mContext == null) {
            Log.e(TAG, "Application context is null! Something is going to go wrong");
            return;
        }
        startBackgroundThread();
        mCameraManager = mContext.getSystemService(CameraManager.class);
        mDisplaySize = getDisplayPreviewSize();
        mCameraManager.registerAvailabilityCallback(
                mCameraCallbacksExecutor, mCameraAvailabilityCallbacks);
        mRroCameraInfo = VendorCameraPrefs.getVendorCameraPrefsFromJson(mContext);
        mUserPrefs = new UserPrefs(mContext);

        refreshAvailableCameraIdList();
        refreshLensFacingCameraIds();

        mCameraId = fetchCameraIdFromUserPrefs(/*defaultCameraId*/ mBackCameraId);
        mCameraInfo = getOrCreateCameraInfo(mCameraId);
        mZoomRatio = mUserPrefs.fetchZoomRatio(mCameraId.toString(), /*defaultZoom*/ 1.0f);

        mRotationProvider = new RotationProvider(context.getApplicationContext(),
                mCameraInfo.getSensorOrientation(), mCameraInfo.getLensFacing());
        // Adds a listener to enable the RotationProvider so that we can get the rotation
        // degrees info to rotate the webcam stream images.
        mRotationProvider.addListener(mCameraCallbacksExecutor, rotation -> {
            if (mRotationUpdateListener != null) {
                mRotationUpdateListener.onRotationUpdated(rotation);
            }
        });
    }

    @Nullable
    private CameraId fetchCameraIdFromUserPrefs(@Nullable CameraId defaultCameraId) {
        String cameraIdString = mUserPrefs.fetchCameraId(null);
        CameraId cameraId = convertAndValidateCameraIdString(cameraIdString);
        return cameraId != null ? cameraId : defaultCameraId;
    }

    @Nullable
    private CameraId fetchBackCameraIdFromUserPrefs(@Nullable CameraId defaultCameraId) {
        String cameraIdString = mUserPrefs.fetchBackCameraId(null);
        CameraId cameraId = convertAndValidateCameraIdString(cameraIdString);
        return cameraId != null ? cameraId : defaultCameraId;
    }

    @Nullable
    private CameraId fetchFrontCameraIdFromUserPrefs(@Nullable CameraId defaultCameraId) {
        String cameraIdString = mUserPrefs.fetchFrontCameraId(null);
        CameraId cameraId = convertAndValidateCameraIdString(cameraIdString);
        return cameraId != null ? cameraId : defaultCameraId;
    }

    /**
     * Converts the camera id string to {@link CameraId} and returns it only when it is included in
     * the available camera id list.
     */
    @Nullable
    private CameraId convertAndValidateCameraIdString(@Nullable String cameraIdString) {
        CameraId cameraId = CameraId.fromCameraIdString(cameraIdString);
        if (cameraId != null && !mAvailableCameraIds.contains(cameraId)) {
            cameraId = null;
        }
        return cameraId;
    }

    private void convertARGBToRGBA(ByteBuffer argb) {
        // Android Bitmap.Config.ARGB_8888 is laid out as RGBA in an int and java ByteBuffer by
        // default is big endian.
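        // For example, a pixel stored as bytes [R, G, B, A] = [0x10, 0x20, 0x30, 0x40] is
        // rewritten in place below as [0x30, 0x20, 0x10, 0x40], i.e. the B and R bytes swap.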
        for (int i = 0; i < argb.capacity(); i += 4) {
            byte r = argb.get(i);
            byte g = argb.get(i + 1);
            byte b = argb.get(i + 2);
            byte a = argb.get(i + 3);

            //libyuv expects BGRA
            argb.put(i, b);
            argb.put(i + 1, g);
            argb.put(i + 2, r);
            argb.put(i + 3, a);
        }
    }

    private void setupBitmaps(int width, int height) {
        // Initialize logoBitmap. It should fit inside any webcam stream.
        BitmapFactory.Options options = new BitmapFactory.Options();
        options.inPreferredConfig = Bitmap.Config.ARGB_8888;
        // We want the camera blocked logo to cover 1/2 of the frame
        Bitmap logoBitmap =
                BitmapFactory.decodeResource(mContext.getResources(),
                        R.drawable.camera_access_blocked, options);
        int scaledWidth, scaledHeight;
        if (logoBitmap.getWidth() > logoBitmap.getHeight()) {
            scaledWidth = (int)(0.5 * width);
            scaledHeight =
                    (int)(scaledWidth * (float)logoBitmap.getHeight() / logoBitmap.getWidth());
        } else {
            scaledHeight = (int)(0.5 * height);
            scaledWidth =
                    (int)(scaledHeight * (float)logoBitmap.getWidth() / logoBitmap.getHeight());
        }
        // Combined Bitmap which will hold background + camera access blocked image
        Bitmap combinedBitmap =
                Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        Canvas canvas = new Canvas(combinedBitmap);
        // Offsets to start composed image from
        int offsetX = (width - scaledWidth) / 2;
        int offsetY = (height - scaledHeight) / 2;
        int endX = offsetX + scaledWidth;
        int endY = offsetY + scaledHeight;
        canvas.drawBitmap(logoBitmap,
                new Rect(0, 0, logoBitmap.getWidth(), logoBitmap.getHeight()),
                new Rect(offsetX, offsetY, endX, endY), null);
        ByteBuffer byteBuffer = ByteBuffer.allocate(combinedBitmap.getByteCount());
        combinedBitmap.copyPixelsToBuffer(byteBuffer);
        convertARGBToRGBA(byteBuffer);
        mCombinedBitmapBytes = byteBuffer.array();
    }

    private void refreshAvailableCameraIdList() {
        String[] cameraIdList;
        try {
            cameraIdList = mCameraManager.getCameraIdList();
        } catch (CameraAccessException e) {
            Log.e(TAG, "Failed to retrieve the camera id list from CameraManager!", e);
            return;
        }

        List<String> ignoredCameraList = mRroCameraInfo.getIgnoredCameraList();

        for (String cameraId : cameraIdList) {
            // Skips the ignored cameras
            if (ignoredCameraList.contains(cameraId)) {
                continue;
            }

            CameraCharacteristics characteristics = getCameraCharacteristicsOrNull(cameraId);

            if (characteristics == null) {
                continue;
            }

            // Only lists backward compatible cameras
            if (!isBackwardCompatible(characteristics)) {
                continue;
            }

            List<VendorCameraPrefs.PhysicalCameraInfo> physicalCameraInfos =
                    mRroCameraInfo.getPhysicalCameraInfos(cameraId);

            if (physicalCameraInfos == null || physicalCameraInfos.isEmpty()) {
                mAvailableCameraIds.add(new CameraId(cameraId, null));
                continue;
            }

            for (VendorCameraPrefs.PhysicalCameraInfo physicalCameraInfo :
                    physicalCameraInfos) {
                // Only lists backward compatible cameras
                CameraCharacteristics physChars = getCameraCharacteristicsOrNull(
                        physicalCameraInfo.physicalCameraId);
                if (isBackwardCompatible(physChars)) {
                    mAvailableCameraIds.add(
                            new CameraId(cameraId, physicalCameraInfo.physicalCameraId));
                }
            }
        }
    }

    private void refreshLensFacingCameraIds() {
        // Loads the default back and front camera from the user prefs.
        mBackCameraId = fetchBackCameraIdFromUserPrefs(null);
        mFrontCameraId = fetchFrontCameraIdFromUserPrefs(null);

        if (mBackCameraId != null && mFrontCameraId != null) {
            return;
        }

        for (CameraId cameraId : mAvailableCameraIds) {
            CameraCharacteristics characteristics = getCameraCharacteristicsOrNull(
                    cameraId.mainCameraId);
            if (characteristics == null) {
                continue;
            }

            Integer lensFacing = getCameraCharacteristic(characteristics,
                    CameraCharacteristics.LENS_FACING);
            if (lensFacing == null) {
                continue;
            }
            if (mBackCameraId == null && lensFacing == CameraMetadata.LENS_FACING_BACK) {
                mBackCameraId = cameraId;
            } else if (mFrontCameraId == null
                    && lensFacing == CameraMetadata.LENS_FACING_FRONT) {
                mFrontCameraId = cameraId;
            }
        }
    }

    /**
     * Returns the available {@link CameraId} list.
     */
    public List<CameraId> getAvailableCameraIds() {
        return mAvailableCameraIds;
    }

    public CameraInfo getOrCreateCameraInfo(CameraId cameraId) {
        CameraInfo cameraInfo = mCameraInfoMap.get(cameraId);
        if (cameraInfo != null) {
            return cameraInfo;
        }

        cameraInfo = createCameraInfo(cameraId);
        mCameraInfoMap.put(cameraId, cameraInfo);
        return cameraInfo;
    }

    private CameraInfo createCameraInfo(CameraId cameraId) {
        CameraCharacteristics chars = getCameraCharacteristicsOrNull(cameraId.mainCameraId);
        CameraCharacteristics physicalChars = getCameraCharacteristicsOrNull(
                cameraId.physicalCameraId != null ? cameraId.physicalCameraId
                        : cameraId.mainCameraId);
        // Retrieves the physical camera zoom ratio range from the vendor camera prefs.
        Range<Float> zoomRatioRange = mRroCameraInfo.getPhysicalCameraZoomRatioRange(cameraId);
        // Falls back to the zoom ratio range from the camera characteristics if no custom
        // data is found.
        if (zoomRatioRange == null) {
            zoomRatioRange = getCameraCharacteristic(physicalChars,
                    CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE);
        }

        // Logical cameras will be STANDARD category by default. For physical cameras, their
        // categories should be specified by the vendor. If the category is not provided, use
        // focal lengths to determine the physical camera's category.
        CameraCategory cameraCategory = CameraCategory.STANDARD;
        if (cameraId.physicalCameraId != null) {
            cameraCategory = mRroCameraInfo.getCameraCategory(cameraId);
            if (cameraCategory == CameraCategory.UNKNOWN) {
                if (physicalChars != null) {
                    cameraCategory = calculateCameraCategoryByFocalLengths(physicalChars);
                }
            }
        }
        // We should consider using a builder pattern here if the parameters grow a lot.
        return new CameraInfo(
                new CameraId(cameraId.mainCameraId, cameraId.physicalCameraId),
                getCameraCharacteristic(chars, CameraCharacteristics.LENS_FACING),
                getCameraCharacteristic(chars, CameraCharacteristics.SENSOR_ORIENTATION),
                zoomRatioRange,
                getCameraCharacteristic(chars,
                        CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE),
                isFacePrioritySupported(chars),
                isStreamUseCaseSupported(chars),
                cameraCategory
        );
    }

    private CameraCategory calculateCameraCategoryByFocalLengths(
            CameraCharacteristics characteristics) {
        float[] focalLengths = characteristics.get(
                CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS);

        if (focalLengths == null) {
            return CameraCategory.UNKNOWN;
        }

        final int standardCamera = 0x1;
        final int telephotoCamera = 0x2;
        final int wideAngleCamera = 0x4;
        final int ultraWideCamera = 0x8;

        int cameraCategory = 0;

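        // Focal lengths are reported in millimeters. For example, a physical camera whose only
        // focal length is 25mm sets just the wideAngleCamera bit and maps to WIDE_ANGLE below,
        // while a camera that reports several lens types falls through to STANDARD.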
        for (float focalLength : focalLengths) {
            if (focalLength >= 50) {
                cameraCategory |= telephotoCamera;
            } else if (focalLength >= 30) {
                cameraCategory |= standardCamera;
            } else if (focalLength >= 20) {
                cameraCategory |= wideAngleCamera;
            } else {
                cameraCategory |= ultraWideCamera;
            }
        }

        return switch (cameraCategory) {
            case telephotoCamera -> CameraCategory.TELEPHOTO;
            case wideAngleCamera -> CameraCategory.WIDE_ANGLE;
            case ultraWideCamera -> CameraCategory.ULTRA_WIDE;
            default -> CameraCategory.STANDARD;
        };
    }

    @Nullable
    private static <T> T getCameraCharacteristic(CameraCharacteristics chars,
            CameraCharacteristics.Key<T> key) {
        return chars.get(key);
    }

    @Nullable
    private CameraCharacteristics getCameraCharacteristicsOrNull(String cameraId) {
        try {
            CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(
                    cameraId);
            return characteristics;
        } catch (CameraAccessException e) {
            Log.e(TAG, "Failed to get characteristics for camera " + cameraId
                    + ".", e);
        }
        return null;
    }

    @Nullable
    private <T> T getCameraCharacteristic(String cameraId, CameraCharacteristics.Key<T> key) {
        CameraCharacteristics chars = getCameraCharacteristicsOrNull(cameraId);
        if (chars != null) {
            return chars.get(key);
        }
        return null;
    }

    public void setWebcamStreamConfig(boolean mjpeg, int width, int height, int fps) {
        if (VERBOSE) {
            Log.v(TAG, "Set stream config service : mjpeg ? " + mjpeg + " width " + width +
                    " height " + height + " fps " + fps);
        }
        synchronized (mSerializationLock) {
            long usage = HardwareBuffer.USAGE_CPU_READ_OFTEN;
            mStreamConfigs = new StreamConfigs(mjpeg, width, height, fps);
            synchronized (mImgReaderLock) {
                if (mImgReader != null) {
                    mImgReader.close();
                }
                mImgReader = new ImageReader.Builder(width, height)
                        .setMaxImages(MAX_BUFFERS)
                        .setDefaultHardwareBufferFormat(HardwareBuffer.YCBCR_420_888)
                        .setUsage(usage)
                        .build();
                mImgReader.setOnImageAvailableListener(mOnImageAvailableListener,
                        mImageReaderHandler);
            }
        }
    }

    private void fillImageWithCameraAccessBlockedLogo(Image img) {
        Image.Plane[] planes = img.getPlanes();

        ByteBuffer rgbaBuffer = planes[0].getBuffer();
        // Copy the bitmap array
        rgbaBuffer.put(mCombinedBitmapBytes);
    }

    private void handleOnCameraAvailable() {
        // Offload to mServiceEventsExecutor since any camera operations which require
        // mSerializationLock should be performed on mServiceEventsExecutor thread.
        mServiceEventsExecutor.execute(() -> {
            synchronized (mSerializationLock) {
                if (mCameraDevice != null) {
                    return;
                }
                stopShowingCameraUnavailableLogo();
                setWebcamStreamConfig(mStreamConfigs.isMjpeg, mStreamConfigs.width,
                        mStreamConfigs.height, mStreamConfigs.fps);
                startWebcamStreamingNoOffload();
            }
        });
    }

    /**
     * Stops showing the camera unavailable logo. Should only be called on the
     * mServiceEventsExecutor thread
     */
    private void stopShowingCameraUnavailableLogo() {
        // destroy the executor since camera getting evicted would be a rare occurrence
        synchronized (mSerializationLock) {
            if (mImageWriterEventsExecutor != null) {
                mImageWriterEventsExecutor.shutdown();
            }
            mImageWriterEventsExecutor = null;
            mImageWriter = null;
            mCombinedBitmapBytes = null;
        }
    }

    private void startShowingCameraUnavailableLogo() {
        mServiceEventsExecutor.execute(() -> {
            startShowingCameraUnavailableLogoNoOffload();
        });
    }

    /**
     * Starts showing the camera unavailable logo. Should only be called on the
     * mServiceEventsExecutor thread
     */
    private void startShowingCameraUnavailableLogoNoOffload() {
        synchronized (mSerializationLock) {
            setupBitmaps(mStreamConfigs.width, mStreamConfigs.height);
            long usage = HardwareBuffer.USAGE_CPU_READ_OFTEN;
            synchronized (mImgReaderLock) {
                if (mImgReader != null) {
                    mImgReader.close();
                }
                mImgReader = new ImageReader.Builder(
                        mStreamConfigs.width, mStreamConfigs.height)
                        .setMaxImages(MAX_BUFFERS)
                        .setDefaultHardwareBufferFormat(HardwareBuffer.RGBA_8888)
                        .setUsage(usage)
                        .build();

                mImgReader.setOnImageAvailableListener(mOnImageAvailableListener,
                        mImageReaderHandler);
            }
            mImageWriter = ImageWriter.newInstance(mImgReader.getSurface(), MAX_BUFFERS);
            // In effect, the webcam stream has started
            mImageWriterEventsExecutor = Executors.newScheduledThreadPool(1);
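            // Queue one logo frame per frame interval, e.g. every 33 ms for a 30 fps stream
            // (1000 / fps, integer division).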
            mImageWriterEventsExecutor.scheduleAtFixedRate(new Runnable() {
                @Override
                public void run() {
                    Image img = mImageWriter.dequeueInputImage();
                    // Fill in image
                    fillImageWithCameraAccessBlockedLogo(img);
                    mImageWriter.queueInputImage(img);
                }
            }, /*initialDelay*/0, /*fps period ms*/1000 / mStreamConfigs.fps,
                    TimeUnit.MILLISECONDS);
        }
    }

    /**
     * Must be called with mSerializationLock held, on the mServiceEventsExecutor thread.
     */
    private void openCameraBlocking() {
        if (mCameraManager == null) {
            Log.e(TAG, "CameraManager is not initialized, aborting");
            return;
        }
        if (mCameraId == null) {
            Log.e(TAG, "No camera is found on the device, aborting");
            return;
        }
        if (mCameraDevice != null) {
            mCameraDevice.close();
            mCameraDevice = null;
        }
        try {
            mCameraManager.openCamera(mCameraId.mainCameraId, mCameraCallbacksExecutor,
                    mCameraStateCallback);
        } catch (CameraAccessException e) {
            Log.e(TAG, "openCamera failed for cameraId : " + mCameraId.mainCameraId, e);
            startShowingCameraUnavailableLogo();
        }
        mReadyToStream.block();
        mReadyToStream.close();
    }

    private void setupPreviewOnlyStreamLocked(SurfaceTexture previewSurfaceTexture) {
        setupPreviewOnlyStreamLocked(new Surface(previewSurfaceTexture));
    }

    private void setupPreviewOnlyStreamLocked(Surface previewSurface) {
        mPreviewSurface = previewSurface;
        openCameraBlocking();
        mPreviewRequestBuilder = createInitialPreviewRequestBuilder(mPreviewSurface);
        if (mPreviewRequestBuilder == null) {
            return;
        }
        mPreviewOutputConfiguration = new OutputConfiguration(mPreviewSurface);
        if (mCameraInfo.isStreamUseCaseSupported() && shouldUseStreamUseCase()) {
            mPreviewOutputConfiguration.setStreamUseCase(
                    CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW);
        }

        // So that we don't have to reconfigure if / when the preview activity is turned off /
        // on again.
        mWebcamOutputConfiguration = null;
        mOutputConfigurations = Arrays.asList(mPreviewOutputConfiguration);
        mCurrentState = CameraStreamingState.PREVIEW_STREAMING;
        createCaptureSessionBlocking();
    }

    private CaptureRequest.Builder createInitialPreviewRequestBuilder(Surface targetSurface) {
        CaptureRequest.Builder captureRequestBuilder;
        try {
            captureRequestBuilder =
                    mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        } catch (CameraAccessException e) {
            Log.e(TAG, "createCaptureRequest failed", e);
            stopStreamingAltogetherLocked();
            startShowingCameraUnavailableLogoNoOffload();
            return null;
        }

        int currentFps = 30;
        if (mStreamConfigs != null) {
            currentFps = mStreamConfigs.fps;
        }
        Range<Integer> fpsRange;
        if (currentFps != 0) {
            fpsRange = new Range<>(currentFps, currentFps);
        } else {
            fpsRange = new Range<>(30, 30);
        }
        captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
        captureRequestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, mZoomRatio);
        captureRequestBuilder.addTarget(targetSurface);
        captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
        if (mCameraInfo.isFacePrioritySupported()) {
            captureRequestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE,
                    CaptureRequest.CONTROL_SCENE_MODE_FACE_PRIORITY);
        }

        return captureRequestBuilder;
    }

    private static boolean checkArrayContains(@Nullable int[] array, int value) {
        if (array == null) {
            return false;
        }
        for (int val : array) {
            if (val == value) {
                return true;
            }
        }

        return false;
    }

    private static boolean isBackwardCompatible(CameraCharacteristics chars) {
        int[] availableCapabilities = getCameraCharacteristic(chars,
                CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
        return checkArrayContains(availableCapabilities,
                CaptureRequest.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    }

    private static boolean isFacePrioritySupported(CameraCharacteristics chars) {
        int[] availableSceneModes = getCameraCharacteristic(chars,
                CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES);
        return checkArrayContains(
                availableSceneModes, CaptureRequest.CONTROL_SCENE_MODE_FACE_PRIORITY);
    }

    private static boolean isStreamUseCaseSupported(CameraCharacteristics chars) {
        int[] caps = getCameraCharacteristic(chars,
                CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
        return checkArrayContains(
                caps, CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE);
    }

    // CameraManager uses the same computation when populating the mandatory streams.
    private Size getDisplayPreviewSize() {
        Size ret = new Size(1920, 1080);
        DisplayManager displayManager =
                mContext.getSystemService(DisplayManager.class);
        Display display = displayManager.getDisplay(Display.DEFAULT_DISPLAY);
        if (display != null) {
            Point sz = new Point();
            display.getRealSize(sz);
            int width = sz.x;
            int height = sz.y;

            if (height > width) {
                height = width;
                width = sz.y;
            }
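            // e.g. a portrait 1080x2400 panel is reported here as a landscape 2400x1080 size
            // (illustrative resolution).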
            ret = new Size(width, height);
        } else {
            Log.e(TAG, "Invalid default display!");
        }
        return ret;
    }

    // Check whether we satisfy mandatory stream combinations for stream use case
    private boolean shouldUseStreamUseCase() {
        // Webcam stream - YUV should be <= 1440p
        // Preview stream should be <= PREVIEW - which is already guaranteed by
        // getSuitablePreviewSize()
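        // For example, a 1920x1080 webcam stream stays within the 1920x1440 pixel-count limit
        // below, while a 3840x2160 stream exceeds it and disables stream use cases.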
        if (mWebcamOutputConfiguration != null && mStreamConfigs != null &&
                (mStreamConfigs.width * mStreamConfigs.height) > (1920 * 1440)) {
            return false;
        }
        return true;
    }

    private void setupPreviewStreamAlongsideWebcamStreamLocked(
            SurfaceTexture previewSurfaceTexture) {
        setupPreviewStreamAlongsideWebcamStreamLocked(new Surface(previewSurfaceTexture));
    }

    private void setupPreviewStreamAlongsideWebcamStreamLocked(Surface previewSurface) {
        if (VERBOSE) {
            Log.v(TAG, "setupPreviewAlongsideWebcam");
        }
        mPreviewSurface = previewSurface;
        mPreviewOutputConfiguration = new OutputConfiguration(mPreviewSurface);
        if (mCameraInfo.isStreamUseCaseSupported() && shouldUseStreamUseCase()) {
            mPreviewOutputConfiguration.setStreamUseCase(
                    CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW);
        }

        mPreviewRequestBuilder.addTarget(mPreviewSurface);
        mOutputConfigurations = Arrays.asList(mPreviewOutputConfiguration,
                mWebcamOutputConfiguration);

        mCurrentState = CameraStreamingState.PREVIEW_AND_WEBCAM_STREAMING;
        createCaptureSessionBlocking();
    }

    public void startPreviewStreaming(SurfaceTexture surfaceTexture, Size previewSize,
            Consumer<Size> previewSizeChangeListener) {
        // Started on a background thread since we don't want to be blocking either the activity's
        // or the service's main thread (we call blocking camera open in these methods internally)
        mServiceEventsExecutor.execute(new Runnable() {
            @Override
            public void run() {
                synchronized (mSerializationLock) {
                    mPreviewSurfaceTexture = surfaceTexture;
                    mPreviewSize = previewSize;
                    mPreviewSizeChangeListener = previewSizeChangeListener;
                    switch (mCurrentState) {
                        case NO_STREAMING:
                            setupPreviewOnlyStreamLocked(surfaceTexture);
                            break;
                        case WEBCAM_STREAMING:
                            setupPreviewStreamAlongsideWebcamStreamLocked(surfaceTexture);
                            break;
                        case PREVIEW_STREAMING:
                        case PREVIEW_AND_WEBCAM_STREAMING:
                            Log.e(TAG, "Incorrect current state for startPreviewStreaming " +
                                    mCurrentState);
                    }
                }
            }
        });
    }

    private void setupWebcamOnlyStreamAndOpenCameraLocked() {
        // Setup outputs
        if (VERBOSE) {
            Log.v(TAG, "setupWebcamOnly");
        }
        Surface surface = mImgReader.getSurface();
        openCameraBlocking();
        mCurrentState = CameraStreamingState.WEBCAM_STREAMING;
        if (mCameraDevice != null) {
            mPreviewRequestBuilder = createInitialPreviewRequestBuilder(surface);
            if (mPreviewRequestBuilder == null) {
                Log.e(TAG, "Failed to create the webcam stream.");
                return;
            }
            mWebcamOutputConfiguration = new OutputConfiguration(surface);
            if (mCameraInfo.isStreamUseCaseSupported() && shouldUseStreamUseCase()) {
                mWebcamOutputConfiguration.setStreamUseCase(
                        CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL);
            }
            mOutputConfigurations = Arrays.asList(mWebcamOutputConfiguration);
            createCaptureSessionBlocking();
        }
    }

    private void setupWebcamStreamAndReconfigureSessionLocked() {
        // Setup outputs
        if (VERBOSE) {
            Log.v(TAG, "setupWebcamStreamAndReconfigureSession");
        }
        Surface surface = mImgReader.getSurface();
        mPreviewRequestBuilder.addTarget(surface);
        mWebcamOutputConfiguration = new OutputConfiguration(surface);
        if (mCameraInfo.isStreamUseCaseSupported() && shouldUseStreamUseCase()) {
            mWebcamOutputConfiguration.setStreamUseCase(
                    CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL);
        }
        mCurrentState = CameraStreamingState.PREVIEW_AND_WEBCAM_STREAMING;
        mOutputConfigurations =
                Arrays.asList(mWebcamOutputConfiguration, mPreviewOutputConfiguration);
        createCaptureSessionBlocking();
    }

    /**
     * Adjust preview output configuration when preview size is changed.
     */
    private void adjustPreviewOutputConfiguration() {
        if (mPreviewSurfaceTexture == null || mPreviewSurface == null) {
            return;
        }

        Size suitablePreviewSize = getSuitablePreviewSize();
        // If the required preview size is the same, don't need to adjust the output configuration
        if (Objects.equals(suitablePreviewSize, mPreviewSize)) {
            return;
        }

        // Removes the original preview surface
        mPreviewRequestBuilder.removeTarget(mPreviewSurface);
        // Adjusts the SurfaceTexture default buffer size to match the new preview size
        mPreviewSurfaceTexture.setDefaultBufferSize(suitablePreviewSize.getWidth(),
                suitablePreviewSize.getHeight());
        mPreviewSize = suitablePreviewSize;
        mPreviewRequestBuilder.addTarget(mPreviewSurface);
        mPreviewOutputConfiguration = new OutputConfiguration(mPreviewSurface);
        if (mCameraInfo.isStreamUseCaseSupported()) {
            mPreviewOutputConfiguration.setStreamUseCase(
                    CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW);
        }

        mOutputConfigurations = mWebcamOutputConfiguration != null ? Arrays.asList(
                mWebcamOutputConfiguration, mPreviewOutputConfiguration) : Arrays.asList(
                mPreviewOutputConfiguration);

        // Invokes the preview size change listener so that the preview activity can adjust its
        // size and scale to match the new size.
        if (mPreviewSizeChangeListener != null) {
            mPreviewSizeChangeListener.accept(suitablePreviewSize);
        }
    }
    public void startWebcamStreaming() {
        mServiceEventsExecutor.execute(() -> {
            // Started on a background thread since we don't want to be blocking the service's main
            // thread (we call blocking camera open in these methods internally)
            startWebcamStreamingNoOffload();
        });
    }

    /**
     * Starts webcam streaming. This should only be called on the service events executor thread.
     */
    public void startWebcamStreamingNoOffload() {
        mStartCaptureWebcamStream.set(true);
        synchronized (mSerializationLock) {
            synchronized (mImgReaderLock) {
                if (mImgReader == null) {
                    Log.e(TAG,
                            "Webcam streaming requested without ImageReader initialized");
                    return;
                }
            }
            switch (mCurrentState) {
                // Our current state could also be webcam streaming and we want to start the
                // camera again - for example: we never had the camera and were streaming the
                // camera unavailable logo - when the camera becomes available we actually want
                // to start streaming camera frames.
                case WEBCAM_STREAMING:
                case NO_STREAMING:
                    setupWebcamOnlyStreamAndOpenCameraLocked();
                    break;
                case PREVIEW_STREAMING:
                    adjustPreviewOutputConfiguration();
                    // It's okay to recreate an already running camera session with
                    // preview since the 'glitch' that we see will not be on the webcam
                    // stream.
                    setupWebcamStreamAndReconfigureSessionLocked();
                    break;
                case PREVIEW_AND_WEBCAM_STREAMING:
                    if (mCameraDevice == null) {
                        // We had been evicted and were streaming fake webcam streams,
                        // preview activity was selected, and then the camera became available.
                        setupWebcamOnlyStreamAndOpenCameraLocked();
                        if (mPreviewSurface != null) {
                            setupPreviewStreamAlongsideWebcamStreamLocked(mPreviewSurface);
                        }
                    } else {
                        Log.e(TAG, "Incorrect current state for startWebcamStreaming "
                                + mCurrentState + " since webcam and preview already streaming");
                    }
            }
        }
    }

    private void stopPreviewStreamOnlyLocked() {
        mPreviewRequestBuilder.removeTarget(mPreviewSurface);
        mOutputConfigurations = Arrays.asList(mWebcamOutputConfiguration);
        createCaptureSessionBlocking();
        mPreviewSurfaceTexture = null;
        mPreviewSizeChangeListener = null;
        mPreviewSurface = null;
        mPreviewSize = null;
        mCurrentState = CameraStreamingState.WEBCAM_STREAMING;
    }

    public void stopPreviewStreaming() {
        // Started on a background thread since we don't want to be blocking either the activity's
        // or the service's main thread (we call blocking camera open in these methods internally)
        mServiceEventsExecutor.execute(new Runnable() {
            @Override
            public void run() {
                synchronized (mSerializationLock) {
                    switch (mCurrentState) {
                        case PREVIEW_AND_WEBCAM_STREAMING:
                            stopPreviewStreamOnlyLocked();
                            break;
                        case PREVIEW_STREAMING:
                            stopStreamingAltogetherLocked();
                            break;
                        case NO_STREAMING:
                        case WEBCAM_STREAMING:
                            Log.e(TAG,
                                    "Incorrect current state for stopPreviewStreaming " +
                                            mCurrentState);
                    }
                }
            }
        });
    }
1151 
stopWebcamStreamOnlyLocked()1152     private void stopWebcamStreamOnlyLocked() {
1153         // Re-configure session to have only the preview stream
1154         // Setup outputs
1155         mPreviewRequestBuilder.removeTarget(mImgReader.getSurface());
1156         mOutputConfigurations =
1157                 Arrays.asList(mPreviewOutputConfiguration);
1158         mCurrentState = CameraStreamingState.PREVIEW_STREAMING;
1159         mWebcamOutputConfiguration = null;
1160         createCaptureSessionBlocking();
1161     }
1162 
stopStreamingAltogetherLocked()1163     private void stopStreamingAltogetherLocked() {
1164         stopStreamingAltogetherLocked(/*closeImageReader*/true);
1165     }
1166 
    private void stopStreamingAltogetherLocked(boolean closeImageReader) {
        if (VERBOSE) {
            Log.v(TAG, "StopStreamingAltogether");
        }
        mCurrentState = CameraStreamingState.NO_STREAMING;
        synchronized (mImgReaderLock) {
            if (closeImageReader && mImgReader != null) {
                mImgReader.close();
                mImgReader = null;
            }
        }
        if (mCameraDevice != null) {
            mCameraDevice.close();
        }
        mCameraDevice = null;
        mWebcamOutputConfiguration = null;
        mPreviewOutputConfiguration = null;
        mTapToFocusPoints = null;
        mReadyToStream.close();
    }

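    /**
     * Stops the webcam stream. If the preview stream is also running, only the webcam output is
     * torn down; otherwise all streaming is stopped. The work is posted to the service events
     * executor so the service's main thread is not blocked.
     */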
    public void stopWebcamStreaming() {
        // Started on a background thread since we don't want to be blocking the service's main
        // thread (we call blocking camera open in these methods internally)
        mServiceEventsExecutor.execute(new Runnable() {
            @Override
            public void run() {
                mStartCaptureWebcamStream.set(false);
                synchronized (mSerializationLock) {
                    switch (mCurrentState) {
                        case PREVIEW_AND_WEBCAM_STREAMING:
                            stopWebcamStreamOnlyLocked();
                            break;
                        case WEBCAM_STREAMING:
                            stopStreamingAltogetherLocked();
                            break;
                        case PREVIEW_STREAMING:
                            Log.e(TAG,
                                    "Incorrect current state for stopWebcamStreaming " +
                                            mCurrentState);
                            return;
                    }

                    if (mImageWriterEventsExecutor != null) {
                        stopShowingCameraUnavailableLogo();
                    }
                }
            }
        });
    }

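    /**
     * Starts the handler thread used for ImageReader callbacks and creates the single-threaded
     * executors used for camera callbacks and for serving service events.
     */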
    private void startBackgroundThread() {
        HandlerThread imageReaderThread = new HandlerThread("SdkCameraFrameProviderThread");
        imageReaderThread.start();
        mImageReaderHandler = new Handler(imageReaderThread.getLooper());
        // We need two executor threads since the surface texture add / remove calls from the fg
        // service are served on the main thread. To avoid waiting on capture session creation
        // and onCaptureSequenceCompleted, we need a separate thread to handle preview surface
        // addition / removal.
        // b/277099495 has additional context.
        mCameraCallbacksExecutor = Executors.newSingleThreadExecutor();
        mServiceEventsExecutor = Executors.newSingleThreadExecutor();
    }

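    /**
     * Creates a capture session for the current output configurations and blocks until the
     * capture session callback signals that the session is ready (or has failed). This is a
     * no-op when no camera device is open, e.g. while fake streaming camera frames.
     */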
    private void createCaptureSessionBlocking() {
        if (mCameraId.physicalCameraId != null) {
            for (OutputConfiguration config : mOutputConfigurations) {
                config.setPhysicalCameraId(mCameraId.physicalCameraId);
            }
        }
        // In case we're fake streaming camera frames.
        if (mCameraDevice == null) {
            return;
        }
        try {
            mCameraDevice.createCaptureSession(
                    new SessionConfiguration(
                            SessionConfiguration.SESSION_REGULAR, mOutputConfigurations,
                            mCameraCallbacksExecutor, mCameraCaptureSessionCallback));
            mCaptureSessionReady.block();
            mCaptureSessionReady.close();
        } catch (CameraAccessException e) {
            Log.e(TAG, "createCaptureSession failed", e);
            stopStreamingAltogetherLocked();
            startShowingCameraUnavailableLogoNoOffload();
        }
    }

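    /**
     * Returns (closes) the {@code Image} and {@code HardwareBuffer} associated with
     * {@code timestamp} and removes the entry from the in-flight image map. Logs an error if no
     * image with that timestamp is outstanding.
     */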
    public void returnImage(long timestamp) {
        ImageAndBuffer imageAndBuffer = mImageMap.get(timestamp);
        if (imageAndBuffer == null) {
            Log.e(TAG, "Image with timestamp " + timestamp +
                    " was never encoded / already returned");
            return;
        }
        imageAndBuffer.buffer.close();
        imageAndBuffer.image.close();
        mImageMap.remove(timestamp);
        if (VERBOSE) {
            Log.v(TAG, "Returned image " + timestamp);
        }
    }

    /**
     * Returns the {@link CameraInfo} of the working camera.
     */
    public CameraInfo getCameraInfo() {
        return mCameraInfo;
    }

    /**
     * Sets the new zoom ratio setting to the working camera.
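     *
     * <p>The applied zoom ratio is also persisted via {@code UserPrefs} for the current camera
     * id so that it can be restored later.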
     */
    public void setZoomRatio(float zoomRatio) {
        mZoomRatio = zoomRatio;
        mServiceEventsExecutor.execute(() -> {
            synchronized (mSerializationLock) {
                if (mCameraDevice == null || mCaptureSession == null) {
                    return;
                }

                try {
                    mPreviewRequestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomRatio);
                    mCaptureSession.setSingleRepeatingRequest(mPreviewRequestBuilder.build(),
                            mCameraCallbacksExecutor, mCaptureCallback);
                    mUserPrefs.storeZoomRatio(mCameraId.toString(), mZoomRatio);
                } catch (CameraAccessException e) {
                    Log.e(TAG, "Failed to set zoom ratio to the working camera.", e);
                }
            }
        });
    }

    /**
     * Returns the current zoom ratio setting.
     */
    public float getZoomRatio() {
        return mZoomRatio;
    }

    /**
     * Toggles the camera between the back and front cameras.
     *
     * <p>The new camera is set up and configured asynchronously, but the camera state (as queried
     * by other methods in {@code CameraController}) is updated synchronously. So querying camera
     * state and metadata immediately after this method returns yields values associated with the
     * new camera, even if the new camera hasn't started streaming.
     */
    public void toggleCamera() {
        synchronized (mSerializationLock) {
            CameraId newCameraId;

            if (Objects.equals(mCameraId, mBackCameraId)) {
                newCameraId = mFrontCameraId;
            } else {
                newCameraId = mBackCameraId;
            }

            switchCamera(newCameraId);
        }
    }

    /**
     * Switches the current working camera to the specified one.
     */
    public void switchCamera(CameraId cameraId) {
        synchronized (mSerializationLock) {
            mCameraId = cameraId;
            mUserPrefs.storeCameraId(cameraId.toString());
            mCameraInfo = getOrCreateCameraInfo(mCameraId);
            mZoomRatio = mUserPrefs.fetchZoomRatio(mCameraId.toString(), /*defaultZoom*/ 1.0f);
            mTapToFocusPoints = null;

            // Stores the preferred back or front camera options
            if (mCameraInfo.getLensFacing() == CameraCharacteristics.LENS_FACING_BACK) {
                mBackCameraId = mCameraId;
                mUserPrefs.storeBackCameraId(mBackCameraId.toString());
            } else if (mCameraInfo.getLensFacing() == CameraCharacteristics.LENS_FACING_FRONT) {
                mFrontCameraId = mCameraId;
                mUserPrefs.storeFrontCameraId(mFrontCameraId.toString());
            }
        }
        mServiceEventsExecutor.execute(() -> {
            synchronized (mSerializationLock) {
                if (mCameraDevice == null) {
                    // It's possible the preview screen is up before the camera device is opened.
                    return;
                }
                mCaptureSession.close();
                if (mCameraInfo != null) {
                    mRotationProvider.updateSensorOrientation(mCameraInfo.getSensorOrientation(),
                            mCameraInfo.getLensFacing());
                }
                switch (mCurrentState) {
                    case WEBCAM_STREAMING:
                        setupWebcamOnlyStreamAndOpenCameraLocked();
                        break;
                    case PREVIEW_STREAMING:
                        // Preview size might change after toggling the camera.
                        adjustPreviewOutputConfiguration();
                        setupPreviewOnlyStreamLocked(mPreviewSurface);
                        break;
                    case PREVIEW_AND_WEBCAM_STREAMING:
                        setupWebcamOnlyStreamAndOpenCameraLocked();
                        // Preview size might change after toggling the camera.
                        adjustPreviewOutputConfiguration();
                        setupPreviewStreamAlongsideWebcamStreamLocked(mPreviewSurface);
                        break;
                }
            }
        });
    }

    /**
     * Sets a {@link RotationUpdateListener} to monitor the rotation changes.
     */
    public void setRotationUpdateListener(RotationUpdateListener listener) {
        mRotationUpdateListener = listener;
    }

    /**
     * Returns the current rotation degrees value.
     */
    public int getCurrentRotation() {
        return mRotationProvider.getRotation();
    }

    /**
     * Returns the best suitable output size for preview.
     *
     * <p>If the webcam stream doesn't exist, find the largest 16:9 supported output size which is
     * not larger than 1080p. If the webcam stream exists, find the largest supported output size
     * which matches the aspect ratio of the webcam stream size and is not larger than the
     * display size, 1080p, or the webcam stream resolution, whichever is smallest.
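     *
     * <p>For example (illustrative figures only): with a 1280x720 webcam stream and a 1080p
     * display, the largest supported 16:9 output size no larger than 1280x720 would be chosen.
     * The actual result depends on the output sizes the camera reports for SurfaceTexture.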
     */
    public Size getSuitablePreviewSize() {
        if (mCameraId == null) {
            Log.e(TAG, "No camera is found on the device.");
            return null;
        }

        final Size s1080p = new Size(1920, 1080);
        Size maxPreviewSize = s1080p;

        // For PREVIEW, choose the smallest of webcam stream size, display size, and 1080p. This
        // is guaranteed to be supported with a YUV stream.
        if (mImgReader != null) {
            maxPreviewSize = new Size(mImgReader.getWidth(), mImgReader.getHeight());
        }

        if (numPixels(maxPreviewSize) > numPixels(s1080p)) {
            maxPreviewSize = s1080p;
        }

        if (numPixels(maxPreviewSize) > numPixels(mDisplaySize)) {
            maxPreviewSize = mDisplaySize;
        }

        // If webcam stream exists, find an output size matching its aspect ratio. Otherwise, find
        // an output size with 16:9 aspect ratio.
        final Rational targetAspectRatio;
        if (mImgReader != null) {
            targetAspectRatio = new Rational(mImgReader.getWidth(), mImgReader.getHeight());
        } else {
            targetAspectRatio = new Rational(s1080p.getWidth(), s1080p.getHeight());
        }

        StreamConfigurationMap map = getCameraCharacteristic(mCameraId.mainCameraId,
                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

        if (map == null) {
            Log.e(TAG, "Failed to retrieve StreamConfigurationMap. Return null preview size.");
            return null;
        }

        Size[] outputSizes = map.getOutputSizes(SurfaceTexture.class);

        if (outputSizes == null || outputSizes.length == 0) {
            Log.e(TAG, "Empty output sizes. Return null preview size.");
            return null;
        }

        Size finalMaxPreviewSize = maxPreviewSize;
        Size previewSize = Arrays.stream(outputSizes)
                .filter(size -> targetAspectRatio.equals(
                        new Rational(size.getWidth(), size.getHeight())))
                .filter(size -> numPixels(size) <= numPixels(finalMaxPreviewSize))
                .max(Comparator.comparingInt(CameraController::numPixels))
                .orElse(null);

        Log.d(TAG, "Suitable preview size is " + previewSize);
        return previewSize;
    }

    private static int numPixels(Size size) {
        return size.getWidth() * size.getHeight();
    }

    /**
     * Triggers a tap-to-focus operation for the specified normalized point mapping to the FOV.
     *
     * <p>The specified normalized point will be used to calculate the corresponding metering
     * rectangles that will be applied for AF, AE and AWB.
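     *
     * <p>For example, passing {@code new float[] {0.5f, 0.5f}} meters on the center of the field
     * of view (illustrative usage; coordinates are normalized to the [0, 1] range).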
     */
    public void tapToFocus(float[] normalizedPoint) {
        mServiceEventsExecutor.execute(() -> {
            synchronized (mSerializationLock) {
                if (mCameraDevice == null || mCaptureSession == null) {
                    return;
                }

                try {
                    mTapToFocusPoints = normalizedPoint;
                    MeteringRectangle[] meteringRectangles =
                            new MeteringRectangle[]{calculateMeteringRectangle(normalizedPoint)};
                    // Updates the metering rectangles to the repeating request
                    updateTapToFocusParameters(mPreviewRequestBuilder, meteringRectangles,
                            /* afTriggerStart */ false);
                    mCaptureSession.setSingleRepeatingRequest(mPreviewRequestBuilder.build(),
                            mCameraCallbacksExecutor, mCaptureCallback);

                    // Creates a capture request to trigger AF start for the metering rectangles.
                    CaptureRequest.Builder builder = mCameraDevice.createCaptureRequest(
                            CameraDevice.TEMPLATE_PREVIEW);
                    CaptureRequest previewCaptureRequest = mPreviewRequestBuilder.build();

                    for (CaptureRequest.Key<?> key : previewCaptureRequest.getKeys()) {
                        builder.set((CaptureRequest.Key) key, previewCaptureRequest.get(key));
                    }

                    if (mImgReader != null && previewCaptureRequest.containsTarget(
                            mImgReader.getSurface())) {
                        builder.addTarget(mImgReader.getSurface());
                    }

                    if (mPreviewSurface != null && previewCaptureRequest.containsTarget(
                            mPreviewSurface)) {
                        builder.addTarget(mPreviewSurface);
                    }

                    updateTapToFocusParameters(builder, meteringRectangles,
                            /* afTriggerStart */ true);

                    mCaptureSession.captureSingleRequest(builder.build(),
                            mCameraCallbacksExecutor, mCaptureCallback);
                } catch (CameraAccessException e) {
                    Log.e(TAG, "Failed to execute tap-to-focus to the working camera.", e);
                }
            }
        });
    }

    /**
     * Resets to the auto-focus mode.
     */
    public void resetToAutoFocus() {
        mServiceEventsExecutor.execute(() -> {
            synchronized (mSerializationLock) {
                if (mCameraDevice == null || mCaptureSession == null) {
                    return;
                }
                mTapToFocusPoints = null;

                // Resets to CONTINUOUS_VIDEO mode
                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                        CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
                // Clears the AF/AE/AWB regions
                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, null);
                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, null);
                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AWB_REGIONS, null);

                try {
                    mCaptureSession.setSingleRepeatingRequest(mPreviewRequestBuilder.build(),
                            mCameraCallbacksExecutor, mCaptureCallback);
                } catch (CameraAccessException e) {
                    Log.e(TAG, "Failed to reset to auto-focus mode to the working camera.", e);
                }
            }
        });
    }

    /**
     * Retrieves the current tap-to-focus points.
     *
     * @return the normalized points, or {@code null} if the camera is currently in auto-focus
     * mode.
     */
    public float[] getTapToFocusPoints() {
        synchronized (mSerializationLock) {
            return mTapToFocusPoints == null ? null
                    : new float[]{mTapToFocusPoints[0], mTapToFocusPoints[1]};
        }
    }

    /**
     * Calculates the metering rectangle according to the normalized point.
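     *
     * <p>For example, the normalized center point (0.5, 0.5) maps to the center of the sensor's
     * active array and yields a rectangle roughly {@code METERING_RECTANGLE_SIZE_RATIO} times the
     * active array dimensions, clamped to the active array bounds (illustrative description of
     * the math below).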
     */
    private MeteringRectangle calculateMeteringRectangle(float[] normalizedPoint) {
        CameraInfo cameraInfo = getCameraInfo();
        Rect activeArraySize = cameraInfo.getActiveArraySize();
        float halfMeteringRectWidth = (METERING_RECTANGLE_SIZE_RATIO * activeArraySize.width()) / 2;
        float halfMeteringRectHeight =
                (METERING_RECTANGLE_SIZE_RATIO * activeArraySize.height()) / 2;

        Matrix matrix = new Matrix();
        matrix.postRotate(-cameraInfo.getSensorOrientation(), 0.5f, 0.5f);
        // Flips if current working camera is front camera
        if (cameraInfo.getLensFacing() == CameraCharacteristics.LENS_FACING_FRONT) {
            matrix.postScale(1, -1, 0.5f, 0.5f);
        }
        matrix.postScale(activeArraySize.width(), activeArraySize.height());
        float[] mappingPoints = new float[]{normalizedPoint[0], normalizedPoint[1]};
        matrix.mapPoints(mappingPoints);

        Rect meteringRegion = new Rect(
                clamp((int) (mappingPoints[0] - halfMeteringRectWidth), 0,
                        activeArraySize.width()),
                clamp((int) (mappingPoints[1] - halfMeteringRectHeight), 0,
                        activeArraySize.height()),
                clamp((int) (mappingPoints[0] + halfMeteringRectWidth), 0,
                        activeArraySize.width()),
                clamp((int) (mappingPoints[1] + halfMeteringRectHeight), 0,
                        activeArraySize.height())
        );

        return new MeteringRectangle(meteringRegion, MeteringRectangle.METERING_WEIGHT_MAX);
    }

    private int clamp(int value, int min, int max) {
        return Math.min(Math.max(value, min), max);
    }

    /**
     * Updates tap-to-focus parameters to the capture request builder.
     *
     * @param builder            the capture request builder to apply the parameters
     * @param meteringRectangles the metering rectangles to apply to the capture request builder
     * @param afTriggerStart     sets CONTROL_AF_TRIGGER as CONTROL_AF_TRIGGER_START if this
     *                           parameter is {@code true}. Otherwise, sets nothing to
     *                           CONTROL_AF_TRIGGER.
     */
    private void updateTapToFocusParameters(CaptureRequest.Builder builder,
            MeteringRectangle[] meteringRectangles, boolean afTriggerStart) {
        builder.set(CaptureRequest.CONTROL_AF_REGIONS, meteringRectangles);
        builder.set(CaptureRequest.CONTROL_AF_MODE,
                CaptureRequest.CONTROL_AF_MODE_AUTO);
        builder.set(CaptureRequest.CONTROL_AE_REGIONS, meteringRectangles);
        builder.set(CaptureRequest.CONTROL_AE_MODE,
                CaptureRequest.CONTROL_AE_MODE_ON);
        builder.set(CaptureRequest.CONTROL_AWB_REGIONS, meteringRectangles);

        if (afTriggerStart) {
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                    CaptureRequest.CONTROL_AF_TRIGGER_START);
        }
    }

    private static class ImageAndBuffer {
        public Image image;
        public HardwareBuffer buffer;
        public ImageAndBuffer(Image i, HardwareBuffer b) {
            image = i;
            buffer = b;
        }
    }

    /**
     * An interface to monitor the rotation changes.
     */
    interface RotationUpdateListener {
        /**
         * Called when the physical rotation of the device changes, causing the corresponding
         * rotation degrees value to change.
         *
         * @param rotation the updated rotation degrees value.
         */
        void onRotationUpdated(int rotation);
    }
}