1 /* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package com.android.cts.verifier.camera.its; 18 19 import static android.hardware.camera2.cts.CameraTestUtils.MaxStreamSizes; 20 21 import static org.mockito.ArgumentMatchers.eq; 22 import static org.mockito.Mockito.mock; 23 import static org.mockito.Mockito.timeout; 24 import static org.mockito.Mockito.verify; 25 26 import android.app.Activity; 27 import android.app.Notification; 28 import android.app.NotificationChannel; 29 import android.app.NotificationManager; 30 import android.app.Service; 31 import android.content.Context; 32 import android.content.Intent; 33 import android.content.pm.PackageManager; 34 import android.content.pm.ServiceInfo; 35 import android.graphics.Bitmap; 36 import android.graphics.BitmapFactory; 37 import android.graphics.ColorSpace; 38 import android.graphics.ImageFormat; 39 import android.graphics.Rect; 40 import android.graphics.SurfaceTexture; 41 import android.hardware.HardwareBuffer; 42 import android.hardware.Sensor; 43 import android.hardware.SensorEvent; 44 import android.hardware.SensorEventListener; 45 import android.hardware.SensorManager; 46 import android.hardware.SensorPrivacyManager; 47 import android.hardware.camera2.CameraAccessException; 48 import android.hardware.camera2.CameraCaptureSession; 49 import android.hardware.camera2.CameraCharacteristics; 50 import 
android.hardware.camera2.CameraDevice; 51 import android.hardware.camera2.CameraExtensionCharacteristics; 52 import android.hardware.camera2.CameraExtensionSession; 53 import android.hardware.camera2.CameraManager; 54 import android.hardware.camera2.CameraMetadata; 55 import android.hardware.camera2.CaptureFailure; 56 import android.hardware.camera2.CaptureRequest; 57 import android.hardware.camera2.CaptureResult; 58 import android.hardware.camera2.DngCreator; 59 import android.hardware.camera2.TotalCaptureResult; 60 import android.hardware.camera2.cts.CameraTestUtils; 61 import android.hardware.camera2.cts.PerformanceTest; 62 import android.hardware.camera2.cts.helpers.StaticMetadata; 63 import android.hardware.camera2.params.ColorSpaceProfiles; 64 import android.hardware.camera2.params.DynamicRangeProfiles; 65 import android.hardware.camera2.params.ExtensionSessionConfiguration; 66 import android.hardware.camera2.params.InputConfiguration; 67 import android.hardware.camera2.params.MeteringRectangle; 68 import android.hardware.camera2.params.OutputConfiguration; 69 import android.hardware.camera2.params.SessionConfiguration; 70 import android.hardware.camera2.params.StreamConfigurationMap; 71 import android.media.AudioAttributes; 72 import android.media.CamcorderProfile; 73 import android.media.Image; 74 import android.media.ImageReader; 75 import android.media.ImageWriter; 76 import android.media.MediaCodec; 77 import android.media.MediaCodecList; 78 import android.media.MediaFormat; 79 import android.media.MediaMuxer; 80 import android.media.MediaRecorder; 81 import android.os.Build; 82 import android.os.Bundle; 83 import android.os.ConditionVariable; 84 import android.os.Handler; 85 import android.os.HandlerThread; 86 import android.os.IBinder; 87 import android.os.Looper; 88 import android.os.SystemClock; 89 import android.os.SystemProperties; 90 import android.os.Vibrator; 91 import android.provider.MediaStore; 92 import android.util.Log; 93 import 
android.util.Pair; 94 import android.util.Range; 95 import android.util.Rational; 96 import android.util.Size; 97 import android.util.SparseArray; 98 import android.view.Surface; 99 import android.view.SurfaceHolder; 100 import android.view.WindowManager; 101 import android.view.WindowMetrics; 102 103 import androidx.annotation.NonNull; 104 import androidx.annotation.RequiresApi; 105 import androidx.test.InstrumentationRegistry; 106 107 import com.android.compatibility.common.util.ReportLog.Metric; 108 import com.android.cts.verifier.R; 109 import com.android.cts.verifier.camera.performance.CameraTestInstrumentation; 110 import com.android.cts.verifier.camera.performance.CameraTestInstrumentation.MetricListener; 111 import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException; 112 import com.android.ex.camera2.blocking.BlockingExtensionSessionCallback; 113 import com.android.ex.camera2.blocking.BlockingSessionCallback; 114 import com.android.ex.camera2.blocking.BlockingStateCallback; 115 116 import com.google.common.primitives.Ints; 117 import com.google.common.primitives.Longs; 118 119 import org.json.JSONArray; 120 import org.json.JSONException; 121 import org.json.JSONObject; 122 import org.junit.runner.JUnitCore; 123 import org.junit.runner.Request; 124 import org.junit.runner.Result; 125 126 import java.io.BufferedReader; 127 import java.io.ByteArrayOutputStream; 128 import java.io.File; 129 import java.io.IOException; 130 import java.io.InputStreamReader; 131 import java.io.OutputStream; 132 import java.net.ServerSocket; 133 import java.net.Socket; 134 import java.nio.ByteBuffer; 135 import java.nio.ByteOrder; 136 import java.nio.FloatBuffer; 137 import java.nio.charset.Charset; 138 import java.text.SimpleDateFormat; 139 import java.util.ArrayList; 140 import java.util.Arrays; 141 import java.util.Collections; 142 import java.util.Comparator; 143 import java.util.Date; 144 import java.util.HashMap; 145 import java.util.HashSet; 146 
import java.util.LinkedList; 147 import java.util.List; 148 import java.util.Locale; 149 import java.util.Map; 150 import java.util.Objects; 151 import java.util.Set; 152 import java.util.concurrent.BlockingQueue; 153 import java.util.concurrent.ConcurrentHashMap; 154 import java.util.concurrent.CountDownLatch; 155 import java.util.concurrent.Executor; 156 import java.util.concurrent.LinkedBlockingDeque; 157 import java.util.concurrent.LinkedBlockingQueue; 158 import java.util.concurrent.Semaphore; 159 import java.util.concurrent.TimeUnit; 160 import java.util.concurrent.atomic.AtomicInteger; 161 import java.util.stream.Collectors; 162 import java.util.stream.IntStream; 163 import java.util.stream.Stream; 164 165 public class ItsService extends Service implements SensorEventListener { 166 public static final String TAG = ItsService.class.getSimpleName(); 167 168 // Version number to keep host/server communication in sync 169 // This string must be in sync with python side device.py 170 // Updated when interface between script and ItsService is changed 171 private final String ITS_SERVICE_VERSION = "1.0"; 172 173 private final int SERVICE_NOTIFICATION_ID = 37; // random int that is unique within app 174 private NotificationChannel mChannel; 175 176 // Timeouts, in seconds. 177 private static final int TIMEOUT_CALLBACK = 20; 178 private static final int TIMEOUT_3A = 10; 179 180 // Time given for background requests to warm up pipeline 181 private static final long PIPELINE_WARMUP_TIME_MS = 2000; 182 183 // Time given PreviewRecorder to record green buffer frames 184 private static final long PADDED_FRAMES_MS = 600; 185 186 // State transition timeouts, in ms. 
187 private static final long TIMEOUT_IDLE_MS = 2000; 188 private static final long TIMEOUT_IDLE_MS_EXTENSIONS = 20000; 189 private static final long TIMEOUT_SESSION_CLOSE = 3000; 190 private static final long TIMEOUT_SESSION_READY = 3000; 191 private static final long TIMEOUT_CAPTURE_PREVIEW_FRAME_SECONDS = 10; 192 193 // Timeout to wait for a capture result after the capture buffer has arrived, in ms. 194 private static final long TIMEOUT_CAP_RES = 2000; 195 196 private static final int MAX_CONCURRENT_READER_BUFFERS = 10; 197 198 // Supports at most RAW+YUV+JPEG, one surface each, plus optional background stream 199 private static final int MAX_NUM_OUTPUT_SURFACES = 4; 200 201 // Performance class R version number 202 private static final int PERFORMANCE_CLASS_R = Build.VERSION_CODES.R; 203 204 public static final int SERVERPORT = 6000; 205 206 private static final float EPISILON = 0.05f; 207 208 public static final String REGION_KEY = "regions"; 209 public static final String REGION_AE_KEY = "ae"; 210 public static final String REGION_AWB_KEY = "awb"; 211 public static final String REGION_AF_KEY = "af"; 212 public static final String LOCK_AE_KEY = "aeLock"; 213 public static final String LOCK_AWB_KEY = "awbLock"; 214 public static final String TRIGGER_KEY = "triggers"; 215 public static final String PHYSICAL_ID_KEY = "physicalId"; 216 public static final String TRIGGER_AE_KEY = "ae"; 217 public static final String TRIGGER_AF_KEY = "af"; 218 public static final String VIB_PATTERN_KEY = "pattern"; 219 public static final String EVCOMP_KEY = "evComp"; 220 public static final String FLASH_MODE_KEY = "flashMode"; 221 public static final String AUTO_FLASH_KEY = "autoFlash"; 222 public static final String ZOOM_RATIO_KEY = "zoomRatio"; 223 public static final String AUDIO_RESTRICTION_MODE_KEY = "mode"; 224 public static final String SETTINGS_KEY = "settings"; 225 public static final int AVAILABILITY_TIMEOUT_MS = 10; 226 227 private static final HashMap<Integer, String> 
CAMCORDER_PROFILE_QUALITIES_MAP; 228 static { 229 CAMCORDER_PROFILE_QUALITIES_MAP = new HashMap<>(); CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_480P, "480P")230 CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_480P, "480P"); CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_1080P, "1080P")231 CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_1080P, "1080P"); CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_2160P, "2160P")232 CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_2160P, "2160P"); CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_2K, "2k")233 CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_2K, "2k"); CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_4KDCI, "4KDC")234 CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_4KDCI, "4KDC"); CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_720P, "720P")235 CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_720P, "720P"); CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_8KUHD, "8KUHD")236 CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_8KUHD, "8KUHD"); CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_CIF, "CIF")237 CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_CIF, "CIF"); CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_HIGH, "HIGH")238 CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_HIGH, "HIGH"); CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_LOW, "LOW")239 CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_LOW, "LOW"); CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_QCIF, "QCIF")240 CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_QCIF, "QCIF"); CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_QHD, "QHD")241 CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_QHD, "QHD"); 
CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_QVGA, "QVGA")242 CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_QVGA, "QVGA"); CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_VGA, "VGA")243 CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_VGA, "VGA"); 244 } 245 246 private static final String JPEG_R_FMT = "jpeg_r"; 247 private static final String HEIC_ULTRAHDR_FMT = "heic_ultrahdr"; 248 private static HashMap<Integer, String> sFormatMap = new HashMap<>(); 249 static { sFormatMap.put(ImageFormat.PRIVATE, "priv")250 sFormatMap.put(ImageFormat.PRIVATE, "priv"); sFormatMap.put(ImageFormat.JPEG, "jpeg")251 sFormatMap.put(ImageFormat.JPEG, "jpeg"); sFormatMap.put(ImageFormat.JPEG_R, JPEG_R_FMT)252 sFormatMap.put(ImageFormat.JPEG_R, JPEG_R_FMT); sFormatMap.put(ImageFormat.YUV_420_888, "yuv")253 sFormatMap.put(ImageFormat.YUV_420_888, "yuv"); 254 } 255 256 private CameraManager mCameraManager = null; 257 private HandlerThread mCameraThread = null; 258 private Handler mCameraHandler = null; 259 private ItsService.BlockingCameraManager mBlockingCameraManager = null; 260 private BlockingStateCallback mCameraListener = null; 261 private CameraDevice mCamera = null; 262 private CameraCaptureSession mSession = null; 263 private CameraExtensionSession mExtensionSession = null; 264 private ImageReader[] mOutputImageReaders = null; 265 private ImageReader mThreeAOutputImageReader = null; 266 private SparseArray<String> mPhysicalStreamMap = new SparseArray<>(); 267 private SparseArray<Long> mStreamUseCaseMap = new SparseArray<>(); 268 private ImageReader mInputImageReader = null; 269 private ImageReader mExtensionPreviewImageReader = null; 270 private CameraCharacteristics mCameraCharacteristics = null; 271 private CameraExtensionCharacteristics mCameraExtensionCharacteristics = null; 272 private HashMap<String, CameraCharacteristics> mPhysicalCameraChars = new HashMap<>(); 273 private ItsUtils.ItsCameraIdList mItsCameraIdList = 
null; 274 // Declared here so that IntraPreviewAction can access configured CameraCaptureSession 275 private IntraPreviewAction mPreviewAction; 276 277 // To reuse mSession, track output configurations, image reader args, and session listener. 278 private List<OutputConfiguration> mCaptureOutputConfigs = new ArrayList<>(); 279 private ImageReaderArgs mImageReaderArgs = ImageReaderArgs.EMPTY; 280 private BlockingSessionCallback mSessionListener = null; 281 282 private Vibrator mVibrator = null; 283 284 private HandlerThread[] mSaveThreads = new HandlerThread[MAX_NUM_OUTPUT_SURFACES]; 285 private Handler[] mSaveHandlers = new Handler[MAX_NUM_OUTPUT_SURFACES]; 286 private HandlerThread mResultThread = null; 287 private Handler mResultHandler = null; 288 289 private volatile boolean mThreadExitFlag = false; 290 291 private volatile ServerSocket mSocket = null; 292 private volatile SocketRunnable mSocketRunnableObj = null; 293 private Semaphore mSocketQueueQuota = null; 294 private int mMemoryQuota = -1; 295 private LinkedList<Integer> mInflightImageSizes = new LinkedList<>(); 296 private volatile BlockingQueue<ByteBuffer> mSocketWriteQueue = new LinkedBlockingDeque<>(); 297 private final Object mSocketWriteEnqueueLock = new Object(); 298 private final Object mSocketWriteDrainLock = new Object(); 299 300 private volatile BlockingQueue<Object[]> mSerializerQueue = new LinkedBlockingDeque<>(); 301 302 private final AtomicInteger mCountCallbacksRemaining = new AtomicInteger(); 303 private AtomicInteger mCountRawOrDng = new AtomicInteger(); 304 private AtomicInteger mCountRaw10 = new AtomicInteger(); 305 private AtomicInteger mCountRaw12 = new AtomicInteger(); 306 private AtomicInteger mCountJpg = new AtomicInteger(); 307 private AtomicInteger mCountYuv = new AtomicInteger(); 308 private AtomicInteger mCountCapRes = new AtomicInteger(); 309 private AtomicInteger mCountRaw10QuadBayer = new AtomicInteger(); 310 private AtomicInteger mCountRaw10Stats = new AtomicInteger(); 311 
private AtomicInteger mCountRaw10QuadBayerStats = new AtomicInteger(); 312 private AtomicInteger mCountRaw = new AtomicInteger(); 313 private AtomicInteger mCountRawQuadBayer = new AtomicInteger(); 314 private AtomicInteger mCountRawStats = new AtomicInteger(); 315 private AtomicInteger mCountRawQuadBayerStats = new AtomicInteger(); 316 private boolean mCaptureRawIsDng; 317 private boolean mCaptureRawIsStats; 318 private boolean mCaptureRawIsQuadBayer; 319 private boolean mCaptureRawIsQuadBayerStats; 320 private int mCaptureStatsGridWidth; 321 private int mCaptureStatsGridHeight; 322 private CaptureResult[] mCaptureResults = null; 323 private MediaRecorder mMediaRecorder; 324 private Surface mRecordSurface; 325 private CaptureRequest.Builder mCaptureRequestBuilder; 326 327 private volatile ConditionVariable mInterlock3A = new ConditionVariable(true); 328 329 final Object m3AStateLock = new Object(); 330 private volatile boolean mConvergedAE = false; 331 private volatile boolean mPrecaptureTriggered = false; 332 private volatile boolean mConvergeAETriggered = false; 333 private volatile boolean mConvergedAF = false; 334 private volatile boolean mConvergedAWB = false; 335 private volatile boolean mLockedAE = false; 336 private volatile boolean mLockedAWB = false; 337 private volatile boolean mNeedsLockedAE = false; 338 private volatile boolean mNeedsLockedAWB = false; 339 private volatile boolean mDoAE = true; 340 private volatile boolean mDoAF = true; 341 private volatile boolean mSend3AResults = true; 342 private final LinkedBlockingQueue<String> unavailableEventQueue = new LinkedBlockingQueue<>(); 343 private final LinkedBlockingQueue<Pair<String, String>> unavailablePhysicalCamEventQueue = 344 new LinkedBlockingQueue<>(); 345 private Set<String> mUnavailablePhysicalCameras; 346 347 static class MySensorEvent { 348 public Sensor sensor; 349 public int accuracy; 350 public long timestamp; 351 public float[] values; 352 } 353 354 CameraManager.AvailabilityCallback 
ac = new CameraManager.AvailabilityCallback() { 355 @Override 356 public void onCameraAvailable(String cameraId) { 357 super.onCameraAvailable(cameraId); 358 } 359 360 @Override 361 public void onCameraUnavailable(String cameraId) { 362 super.onCameraUnavailable(cameraId); 363 unavailableEventQueue.offer(cameraId); 364 } 365 366 @Override 367 public void onPhysicalCameraAvailable(String cameraId, String physicalCameraId) { 368 super.onPhysicalCameraAvailable(cameraId, physicalCameraId); 369 unavailablePhysicalCamEventQueue.remove(new Pair<>(cameraId, physicalCameraId)); 370 } 371 372 @Override 373 public void onPhysicalCameraUnavailable(String cameraId, String physicalCameraId) { 374 super.onPhysicalCameraUnavailable(cameraId, physicalCameraId); 375 unavailablePhysicalCamEventQueue.offer(new Pair<>(cameraId, physicalCameraId)); 376 } 377 }; 378 379 static class VideoRecordingObject { 380 private static final int INVALID_FRAME_RATE = -1; 381 382 public String recordedOutputPath; 383 public String quality; 384 public Size videoSize; 385 public int videoFrameRate; // -1 implies video framerate was not set by the test 386 public int fileFormat; 387 public double zoomRatio; 388 public Map<String, String> metadata = new HashMap<>(); 389 public List<RecordingResult> perFrameCaptureResults; 390 VideoRecordingObject(String recordedOutputPath, String quality, Size videoSize, int videoFrameRate, int fileFormat, double zoomRatio, List<RecordingResult> perFrameCaptureResults)391 public VideoRecordingObject(String recordedOutputPath, 392 String quality, Size videoSize, int videoFrameRate, 393 int fileFormat, double zoomRatio, 394 List<RecordingResult> perFrameCaptureResults) { 395 this.recordedOutputPath = recordedOutputPath; 396 this.quality = quality; 397 this.videoSize = videoSize; 398 this.videoFrameRate = videoFrameRate; 399 this.fileFormat = fileFormat; 400 this.zoomRatio = zoomRatio; 401 this.perFrameCaptureResults = perFrameCaptureResults; 402 } 403 
VideoRecordingObject(String recordedOutputPath, String quality, Size videoSize, int fileFormat, double zoomRatio, List<RecordingResult> perFrameCaptureResults)404 VideoRecordingObject(String recordedOutputPath, String quality, Size videoSize, 405 int fileFormat, double zoomRatio, 406 List<RecordingResult> perFrameCaptureResults) { 407 this(recordedOutputPath, quality, videoSize, 408 INVALID_FRAME_RATE, fileFormat, zoomRatio, 409 perFrameCaptureResults); 410 } 411 isFrameRateValid()412 public boolean isFrameRateValid() { 413 return videoFrameRate != INVALID_FRAME_RATE; 414 } 415 } 416 417 // For capturing motion sensor traces. 418 private SensorManager mSensorManager = null; 419 private Sensor mAccelSensor = null; 420 private Sensor mMagSensor = null; 421 private Sensor mGyroSensor = null; 422 private Sensor mRotationVector = null; 423 private volatile LinkedList<MySensorEvent> mEvents = null; 424 private final Object mEventLock = new Object(); 425 private volatile boolean mEventsEnabled = false; 426 private HandlerThread mSensorThread = null; 427 private Handler mSensorHandler = null; 428 429 private SensorPrivacyManager mSensorPrivacyManager; 430 431 // Camera test instrumentation 432 private CameraTestInstrumentation mCameraInstrumentation; 433 // Camera PerformanceTest metric 434 private final ArrayList<Metric> mResults = new ArrayList<>(); 435 436 private static final int SERIALIZER_SURFACES_ID = 2; 437 private static final int SERIALIZER_PHYSICAL_METADATA_ID = 3; 438 439 public interface CaptureCallback { onCaptureAvailable(Image capture, String physicalCameraId)440 void onCaptureAvailable(Image capture, String physicalCameraId); 441 } 442 443 public abstract static class CaptureResultListener extends 444 CameraCaptureSession.CaptureCallback {} 445 public abstract static class ExtensionCaptureResultListener extends 446 CameraExtensionSession.ExtensionCaptureCallback {} 447 448 @Override onBind(Intent intent)449 public IBinder onBind(Intent intent) { 450 return 
null; 451 } 452 453 @Override onCreate()454 public void onCreate() { 455 try { 456 mThreadExitFlag = false; 457 458 // Get handle to camera manager. 459 mCameraManager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE); 460 if (mCameraManager == null) { 461 throw new ItsException("Failed to connect to camera manager"); 462 } 463 mBlockingCameraManager = new ItsService.BlockingCameraManager(mCameraManager); 464 mCameraListener = new BlockingStateCallback(); 465 466 // Register for motion events. 467 mEvents = new LinkedList<>(); 468 mSensorManager = (SensorManager)getSystemService(Context.SENSOR_SERVICE); 469 mAccelSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER); 470 mMagSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD); 471 mGyroSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE); 472 mRotationVector = mSensorManager.getDefaultSensor(Sensor.TYPE_ROTATION_VECTOR); 473 mSensorThread = new HandlerThread("SensorThread"); 474 mSensorThread.start(); 475 mSensorHandler = new Handler(mSensorThread.getLooper()); 476 mSensorManager.registerListener(this, mAccelSensor, 477 /*100Hz*/ 10000, mSensorHandler); 478 mSensorManager.registerListener(this, mMagSensor, 479 SensorManager.SENSOR_DELAY_NORMAL, mSensorHandler); 480 mSensorManager.registerListener(this, mGyroSensor, 481 SensorManager.SENSOR_DELAY_FASTEST, mSensorHandler); 482 mSensorManager.registerListener(this, mRotationVector, 483 SensorManager.SENSOR_DELAY_FASTEST, mSensorHandler); 484 485 // Get a handle to the system vibrator. 486 mVibrator = (Vibrator)getSystemService(Context.VIBRATOR_SERVICE); 487 488 // Create threads to receive images and save them. 489 for (int i = 0; i < MAX_NUM_OUTPUT_SURFACES; i++) { 490 mSaveThreads[i] = new HandlerThread("SaveThread" + i); 491 mSaveThreads[i].start(); 492 mSaveHandlers[i] = new Handler(mSaveThreads[i].getLooper()); 493 } 494 495 // Create a thread to handle object serialization. 
496 (new Thread(new SerializerRunnable())).start(); 497 498 // Create a thread to receive capture results and process them. 499 mResultThread = new HandlerThread("ResultThread"); 500 mResultThread.start(); 501 mResultHandler = new Handler(mResultThread.getLooper()); 502 503 // Create a thread for the camera device. 504 mCameraThread = new HandlerThread("ItsCameraThread"); 505 mCameraThread.start(); 506 mCameraHandler = new Handler(mCameraThread.getLooper()); 507 508 // Create a thread to process commands, listening on a TCP socket. 509 mSocketRunnableObj = new SocketRunnable(); 510 (new Thread(mSocketRunnableObj)).start(); 511 } catch (ItsException e) { 512 Logt.e(TAG, "Service failed to start: ", e); 513 } 514 515 NotificationManager notificationManager = 516 (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE); 517 mChannel = new NotificationChannel( 518 "ItsServiceChannel", "ItsService", NotificationManager.IMPORTANCE_LOW); 519 // Configure the notification channel. 520 mChannel.setDescription("ItsServiceChannel"); 521 mChannel.enableVibration(false); 522 notificationManager.createNotificationChannel(mChannel); 523 524 mSensorPrivacyManager = getSystemService(SensorPrivacyManager.class); 525 } 526 527 @Override onStartCommand(Intent intent, int flags, int startId)528 public int onStartCommand(Intent intent, int flags, int startId) { 529 try { 530 // Just log a message indicating that the service is running and is able to accept 531 // socket connections. 
532 while (!mThreadExitFlag && mSocket==null) { 533 Thread.sleep(1); 534 } 535 536 if (intent != null && intent.hasExtra(ItsTestActivity.JCA_VIDEO_PATH_TAG)) { 537 try { 538 mSocketRunnableObj.sendResponse(ItsTestActivity.JCA_CAPTURE_STATUS_TAG, 539 Integer.toString(intent.getIntExtra( 540 ItsTestActivity.JCA_CAPTURE_STATUS_TAG, 541 Activity.RESULT_CANCELED) 542 ) 543 ); 544 mSocketRunnableObj.sendResponse( 545 ItsTestActivity.JCA_VIDEO_PATH_TAG, 546 intent.getStringExtra(ItsTestActivity.JCA_VIDEO_PATH_TAG)); 547 } catch (ItsException e) { 548 Logt.e(TAG, "Error sending JCA capture path and status", e); 549 } 550 return START_STICKY; 551 } 552 if (intent != null && intent.hasExtra(ItsTestActivity.JCA_CAPTURE_PATHS_TAG)) { 553 try { 554 mSocketRunnableObj.sendResponse(ItsTestActivity.JCA_CAPTURE_STATUS_TAG, 555 Integer.toString(intent.getIntExtra( 556 ItsTestActivity.JCA_CAPTURE_STATUS_TAG, 557 Activity.RESULT_CANCELED) 558 ) 559 ); 560 JSONObject obj = new JSONObject(); 561 JSONArray jcaCapturePaths = new JSONArray(intent.getStringArrayListExtra( 562 ItsTestActivity.JCA_CAPTURE_PATHS_TAG)); 563 obj.put(ItsTestActivity.JCA_CAPTURE_PATHS_TAG, jcaCapturePaths); 564 Logt.i(TAG, "Sending JCA capture paths: " + obj.toString()); 565 mSocketRunnableObj.sendResponse( 566 ItsTestActivity.JCA_CAPTURE_PATHS_TAG, 567 obj); 568 } catch (ItsException | org.json.JSONException e) { 569 Logt.e(TAG, "Error sending JCA capture path and status", e); 570 } 571 return START_STICKY; 572 } 573 574 if (!mThreadExitFlag){ 575 Logt.i(TAG, "ItsService ready"); 576 } else { 577 Logt.e(TAG, "Starting ItsService in bad state"); 578 } 579 580 Notification notification = new Notification.Builder(this, mChannel.getId()) 581 .setContentTitle("CameraITS Service") 582 .setContentText("CameraITS Service is running") 583 .setSmallIcon(R.drawable.icon) 584 .setOngoing(true).build(); 585 startForeground(SERVICE_NOTIFICATION_ID, notification, 586 ServiceInfo.FOREGROUND_SERVICE_TYPE_CAMERA); 587 } catch 
(java.lang.InterruptedException e) { 588 Logt.e(TAG, "Error starting ItsService (interrupted)", e); 589 } 590 return START_STICKY; 591 } 592 593 @Override onDestroy()594 public void onDestroy() { 595 mThreadExitFlag = true; 596 for (int i = 0; i < MAX_NUM_OUTPUT_SURFACES; i++) { 597 if (mSaveThreads[i] != null) { 598 mSaveThreads[i].quit(); 599 mSaveThreads[i] = null; 600 } 601 } 602 if (mSensorThread != null) { 603 mSensorThread.quitSafely(); 604 mSensorThread = null; 605 } 606 if (mResultThread != null) { 607 mResultThread.quitSafely(); 608 mResultThread = null; 609 } 610 if (mCameraThread != null) { 611 mCameraThread.quitSafely(); 612 mCameraThread = null; 613 } 614 } 615 616 private static class BlockingCameraManager 617 extends com.android.ex.camera2.blocking.BlockingCameraManager { 618 BlockingCameraManager(CameraManager manager)619 BlockingCameraManager(CameraManager manager) { 620 super(manager); 621 } 622 openCamera(String cameraId, boolean overrideToPortrait, CameraDevice.StateCallback listener, Handler handler)623 public CameraDevice openCamera(String cameraId, boolean overrideToPortrait, 624 CameraDevice.StateCallback listener, Handler handler) 625 throws CameraAccessException, BlockingOpenException { 626 if (handler == null) { 627 throw new IllegalArgumentException("handler must not be null"); 628 } else if (handler.getLooper() == Looper.myLooper()) { 629 throw new IllegalArgumentException( 630 "handler's looper must not be the current looper"); 631 } 632 633 return (new OpenListener(mManager, cameraId, overrideToPortrait, listener, handler)) 634 .blockUntilOpen(); 635 } 636 637 protected class OpenListener 638 extends com.android.ex.camera2.blocking.BlockingCameraManager.OpenListener { OpenListener(CameraManager manager, String cameraId, boolean overrideToPortrait, CameraDevice.StateCallback listener, Handler handler)639 OpenListener(CameraManager manager, String cameraId, boolean overrideToPortrait, 640 CameraDevice.StateCallback listener, Handler 
handler) 641 throws CameraAccessException { 642 super(cameraId, listener); 643 manager.openCamera(cameraId, overrideToPortrait, handler, this); 644 } 645 } 646 } 647 openCameraDevice(String cameraId, JSONObject cmdObj)648 public void openCameraDevice(String cameraId, JSONObject cmdObj) throws ItsException { 649 Logt.i(TAG, String.format("Opening camera %s", cameraId)); 650 651 // Get initial physical unavailable callbacks without opening camera 652 mCameraManager.registerAvailabilityCallback(ac, mCameraHandler); 653 654 try { 655 if (mMemoryQuota == -1) { 656 // Initialize memory quota on this device 657 if (mItsCameraIdList == null) { 658 mItsCameraIdList = ItsUtils.getItsCompatibleCameraIds(mCameraManager); 659 } 660 if (mItsCameraIdList.mCameraIds.size() == 0) { 661 throw new ItsException("No camera devices"); 662 } 663 for (String camId : mItsCameraIdList.mCameraIds) { 664 CameraCharacteristics chars = mCameraManager.getCameraCharacteristics(camId); 665 Size maxYuvSize = ItsUtils.getMaxOutputSize( 666 chars, ImageFormat.YUV_420_888); 667 // 4 bytes per pixel for RGBA8888 Bitmap and at least 3 Bitmaps per CDD 668 int quota = maxYuvSize.getWidth() * maxYuvSize.getHeight() * 4 * 3; 669 if (quota > mMemoryQuota) { 670 mMemoryQuota = quota; 671 } 672 } 673 } 674 } catch (CameraAccessException e) { 675 throw new ItsException("Failed to get device ID list", e); 676 } 677 678 try { 679 mUnavailablePhysicalCameras = getUnavailablePhysicalCameras( 680 unavailablePhysicalCamEventQueue, cameraId); 681 Log.i(TAG, "Unavailable cameras:" + List.of(mUnavailablePhysicalCameras.toString())); 682 if (cmdObj.has("overrideToPortrait")) { 683 mCamera = mBlockingCameraManager.openCamera(cameraId, 684 cmdObj.getBoolean("overrideToPortrait"), mCameraListener, mCameraHandler); 685 } else { 686 mCamera = mBlockingCameraManager.openCamera(cameraId, mCameraListener, 687 mCameraHandler); 688 } 689 mCameraCharacteristics = mCameraManager.getCameraCharacteristics(cameraId); 690 
mCameraExtensionCharacteristics = mCameraManager.getCameraExtensionCharacteristics( 691 cameraId); 692 // The camera should be in available->unavailable state. 693 unavailableEventQueue.clear(); 694 boolean isLogicalCamera = hasCapability( 695 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA); 696 if (isLogicalCamera) { 697 Set<String> physicalCameraIds = mCameraCharacteristics.getPhysicalCameraIds(); 698 for (String id : physicalCameraIds) { 699 if (mUnavailablePhysicalCameras.contains(id)) { 700 Log.i(TAG, "Physical camera id not available: " + id); 701 continue; 702 } 703 mPhysicalCameraChars.put(id, mCameraManager.getCameraCharacteristics(id)); 704 } 705 } 706 mSocketQueueQuota = new Semaphore(mMemoryQuota, true); 707 } catch (CameraAccessException e) { 708 throw new ItsException("Failed to open camera", e); 709 } catch (BlockingOpenException e) { 710 throw new ItsException("Failed to open camera (after blocking)", e); 711 } catch (org.json.JSONException e) { 712 throw new ItsException("Failed to read open camera command", e); 713 } catch (Exception e) { 714 throw new ItsException("Failed to get unavailable physical cameras", e); 715 } 716 mSocketRunnableObj.sendResponse("cameraOpened", ""); 717 } 718 closeCameraDevice()719 public void closeCameraDevice() throws ItsException { 720 try { 721 if (mSession != null) { 722 Logt.i(TAG, "Closing session upon closing camera device."); 723 mSession.close(); 724 mSession = null; 725 } 726 if (mCamera != null) { 727 Logt.i(TAG, "Closing camera"); 728 mCamera.close(); 729 mCamera = null; 730 mCameraManager.unregisterAvailabilityCallback(ac); 731 unavailablePhysicalCamEventQueue.clear(); 732 } 733 // Reset OutputConfigurations and ImageReader args 734 mCaptureOutputConfigs = new ArrayList<>(); 735 closeImageReaders(); 736 } catch (Exception e) { 737 throw new ItsException("Failed to close device"); 738 } 739 mSocketRunnableObj.sendResponse("cameraClosed", ""); 740 } 741 742 class SerializerRunnable 
implements Runnable { 743 // Use a separate thread to perform JSON serialization (since this can be slow due to 744 // the reflection). 745 @Override run()746 public void run() { 747 Logt.i(TAG, "Serializer thread starting"); 748 while (! mThreadExitFlag) { 749 try { 750 Object[] objs = mSerializerQueue.take(); 751 JSONObject jsonObj = new JSONObject(); 752 String tag = null; 753 for (int i = 0; i < objs.length; i++) { 754 Object obj = objs[i]; 755 if (obj instanceof String) { 756 if (tag != null) { 757 throw new ItsException("Multiple tags for socket response"); 758 } 759 tag = (String)obj; 760 } else if (obj instanceof CameraCharacteristics) { 761 jsonObj.put("cameraProperties", ItsSerializer.serialize( 762 (CameraCharacteristics)obj)); 763 } else if (obj instanceof CaptureRequest) { 764 jsonObj.put("captureRequest", ItsSerializer.serialize( 765 (CaptureRequest)obj)); 766 } else if (obj instanceof CaptureResult) { 767 jsonObj.put("captureResult", ItsSerializer.serialize( 768 (CaptureResult)obj)); 769 } else if (obj instanceof JSONArray) { 770 if (tag == "captureResults") { 771 if (i == SERIALIZER_SURFACES_ID) { 772 jsonObj.put("outputs", (JSONArray)obj); 773 } else if (i == SERIALIZER_PHYSICAL_METADATA_ID) { 774 jsonObj.put("physicalResults", (JSONArray)obj); 775 } else { 776 throw new ItsException( 777 "Unsupported JSONArray for captureResults"); 778 } 779 } else { 780 jsonObj.put("outputs", (JSONArray)obj); 781 } 782 } else { 783 throw new ItsException("Invalid object received for serialization"); 784 } 785 } 786 if (tag == null) { 787 throw new ItsException("No tag provided for socket response"); 788 } 789 mSocketRunnableObj.sendResponse(tag, null, jsonObj, null); 790 Logt.i(TAG, String.format("Serialized %s", tag)); 791 } catch (JSONException | ItsException e) { 792 Logt.e(TAG, "Error serializing object", e); 793 break; 794 } catch (java.lang.InterruptedException e) { 795 Logt.e(TAG, "Error serializing object (interrupted)", e); 796 break; 797 } 798 } 799 
Logt.i(TAG, "Serializer thread terminated"); 800 } 801 } 802 803 class SocketWriteRunnable implements Runnable { 804 805 // Use a separate thread to service a queue of objects to be written to the socket, 806 // writing each sequentially in order. This is needed since different handler functions 807 // (called on different threads) will need to send data back to the host script. 808 809 public Socket mOpenSocket = null; 810 private Thread mThread = null; 811 SocketWriteRunnable(Socket openSocket)812 public SocketWriteRunnable(Socket openSocket) { 813 mOpenSocket = openSocket; 814 } 815 setOpenSocket(Socket openSocket)816 public void setOpenSocket(Socket openSocket) { 817 mOpenSocket = openSocket; 818 } 819 820 @Override run()821 public void run() { 822 Logt.i(TAG, "Socket writer thread starting"); 823 while (true) { 824 try { 825 ByteBuffer b = mSocketWriteQueue.take(); 826 synchronized(mSocketWriteDrainLock) { 827 if (mOpenSocket == null) { 828 Logt.e(TAG, "No open socket connection!"); 829 continue; 830 } 831 if (b.hasArray()) { 832 mOpenSocket.getOutputStream().write(b.array(), 0, b.capacity()); 833 } else { 834 byte[] barray = new byte[b.capacity()]; 835 b.get(barray); 836 mOpenSocket.getOutputStream().write(barray); 837 } 838 mOpenSocket.getOutputStream().flush(); 839 Logt.i(TAG, String.format("Wrote to socket: %d bytes", b.capacity())); 840 Integer imgBufSize = mInflightImageSizes.peek(); 841 if (imgBufSize != null && imgBufSize == b.capacity()) { 842 mInflightImageSizes.removeFirst(); 843 if (mSocketQueueQuota != null) { 844 mSocketQueueQuota.release(imgBufSize); 845 } 846 } 847 } 848 } catch (IOException e) { 849 Logt.e(TAG, "Error writing to socket", e); 850 mOpenSocket = null; 851 break; 852 } catch (java.lang.InterruptedException e) { 853 Logt.e(TAG, "Error writing to socket (interrupted)", e); 854 mOpenSocket = null; 855 break; 856 } 857 } 858 Logt.i(TAG, "Socket writer thread terminated"); 859 } 860 checkAndStartThread()861 public synchronized void 
                checkAndStartThread() {
            // (Re)create the writer thread if it never existed or already died.
            if (mThread == null || mThread.getState() == Thread.State.TERMINATED) {
                mThread = new Thread(this);
            }
            // Only start a thread that has not been started yet.
            if (mThread.getState() == Thread.State.NEW) {
                mThread.start();
            }
        }

    }

    class SocketRunnable implements Runnable {

        // Format of sent messages (over the socket):
        // * Serialized JSON object on a single line (newline-terminated)
        // * For byte buffers, the binary data then follows
        //
        // Format of received messages (from the socket):
        // * Serialized JSON object on a single line (newline-terminated)

        private Socket mOpenSocket = null;

        // Accept loop: waits for the host script to connect, then reads newline-terminated
        // JSON commands until the host disconnects, then goes back to waiting.
        @Override
        public void run() {
            Logt.i(TAG, "Socket thread starting");
            try {
                mSocket = new ServerSocket(SERVERPORT);
            } catch (IOException e) {
                Logt.e(TAG, "Failed to create socket", e);
            }

            // Create a new thread to handle writes to this socket.
            SocketWriteRunnable socketWriteRunnable = new SocketWriteRunnable(null);

            while (!mThreadExitFlag) {
                // Receive the socket-open request from the host.
                try {
                    Logt.i(TAG, "Waiting for client to connect to socket");
                    if (mSocket == null) {
                        Logt.e(TAG, "mSocket is null.");
                        break;
                    }
                    mOpenSocket = mSocket.accept();
                    if (mOpenSocket == null) {
                        Logt.e(TAG, "Socket connection error");
                        break;
                    }
                    // Drop any stale responses queued for a previous connection.
                    mSocketWriteQueue.clear();
                    mInflightImageSizes.clear();
                    socketWriteRunnable.setOpenSocket(mOpenSocket);
                    socketWriteRunnable.checkAndStartThread();
                    Logt.i(TAG, "Socket connected");
                } catch (IOException e) {
                    Logt.e(TAG, "Socket open error: ", e);
                    break;
                }

                // Process commands over the open socket.
                while (!mThreadExitFlag) {
                    try {
                        BufferedReader input = new BufferedReader(
                                new InputStreamReader(mOpenSocket.getInputStream()));
                        // NOTE(review): constructors never return null, so this check is dead
                        // code; kept as-is for behavior parity.
                        if (input == null) {
                            Logt.e(TAG, "Failed to get socket input stream");
                            break;
                        }
                        String line = input.readLine();
                        if (line == null) {
                            Logt.i(TAG, "Socket readline returned null (host disconnected)");
                            break;
                        }
                        processSocketCommand(line);
                    } catch (IOException e) {
                        Logt.e(TAG, "Socket read error: ", e);
                        break;
                    } catch (ItsException e) {
                        Logt.e(TAG, "Script error: ", e);
                        break;
                    }
                }

                // Close socket and go back to waiting for a new connection.
                try {
                    synchronized(mSocketWriteDrainLock) {
                        mSocketWriteQueue.clear();
                        mInflightImageSizes.clear();
                        mOpenSocket.close();
                        mOpenSocket = null;
                        socketWriteRunnable.setOpenSocket(null);
                        Logt.i(TAG, "Socket disconnected");
                    }
                } catch (java.io.IOException e) {
                    Logt.e(TAG, "Exception closing socket");
                }
            }

            // It's an overall error state if the code gets here; no recovery.
            // Try to do some cleanup, but the service probably needs to be restarted.
            Logt.i(TAG, "Socket server loop exited");
            mThreadExitFlag = true;
            try {
                synchronized(mSocketWriteDrainLock) {
                    if (mOpenSocket != null) {
                        mOpenSocket.close();
                        mOpenSocket = null;
                        socketWriteRunnable.setOpenSocket(null);
                    }
                }
            } catch (java.io.IOException e) {
                Logt.w(TAG, "Exception closing socket");
            }
            try {
                if (mSocket != null) {
                    mSocket.close();
                    mSocket = null;
                }
            } catch (java.io.IOException e) {
                Logt.w(TAG, "Exception closing socket");
            }
        }

        /**
         * Parses one newline-terminated JSON command from the host and dispatches it to the
         * matching handler by its "cmdName" field. Unknown commands raise ItsException;
         * malformed JSON is logged and ignored.
         *
         * @param cmd one serialized JSON command object
         * @throws ItsException if a dispatched handler fails or the command is unknown
         */
        public void processSocketCommand(String cmd)
                throws ItsException {
            // Default locale must be set to "en-us"
            Locale locale = Locale.getDefault();
            if (!Locale.US.equals(locale)) {
                Logt.e(TAG, "Default language is not set to " + Locale.US + "!");
                stopSelf();
            }

            // Each command is a serialized JSON object.
            try {
                JSONObject cmdObj = new JSONObject(cmd);
                Logt.i(TAG, "Start processing command: " + cmdObj.getString("cmdName"));
                if ("open".equals(cmdObj.getString("cmdName"))) {
                    String cameraId = cmdObj.getString("cameraId");
                    openCameraDevice(cameraId, cmdObj);
                } else if ("close".equals(cmdObj.getString("cmdName"))) {
                    closeCameraDevice();
                } else if ("getCameraProperties".equals(cmdObj.getString("cmdName"))) {
                    doGetProps();
                } else if ("getCameraPropertiesById".equals(cmdObj.getString("cmdName"))) {
                    doGetPropsById(cmdObj);
                } else if ("startSensorEvents".equals(cmdObj.getString("cmdName"))) {
                    doStartSensorEvents();
                } else if ("checkSensorExistence".equals(cmdObj.getString("cmdName"))) {
                    doCheckSensorExistence();
                } else if ("getSensorEvents".equals(cmdObj.getString("cmdName"))) {
                    doGetSensorEvents();
                } else if ("do3A".equals(cmdObj.getString("cmdName"))) {
                    do3A(cmdObj);
                } else if ("doCapture".equals(cmdObj.getString("cmdName"))) {
                    doCapture(cmdObj);
                } else if
                ("doVibrate".equals(cmdObj.getString("cmdName"))) {
                    doVibrate(cmdObj);
                } else if ("setAudioRestriction".equals(cmdObj.getString("cmdName"))) {
                    doSetAudioRestriction(cmdObj);
                } else if ("getCameraIds".equals(cmdObj.getString("cmdName"))) {
                    doGetCameraIds();
                } else if ("doReprocessCapture".equals(cmdObj.getString("cmdName"))) {
                    doReprocessCapture(cmdObj);
                } else if ("getItsVersion".equals(cmdObj.getString("cmdName"))) {
                    mSocketRunnableObj.sendResponse("ItsVersion", ITS_SERVICE_VERSION);
                } else if ("isStreamCombinationSupported".equals(cmdObj.getString("cmdName"))) {
                    doCheckStreamCombination(cmdObj);
                } else if ("getCameraSessionProperties".equals(cmdObj.getString("cmdName"))) {
                    doGetSessionProps(cmdObj);
                } else if ("isCameraPrivacyModeSupported".equals(cmdObj.getString("cmdName"))) {
                    doCheckCameraPrivacyModeSupport();
                } else if ("isPrimaryCamera".equals(cmdObj.getString("cmdName"))) {
                    String cameraId = cmdObj.getString("cameraId");
                    doCheckPrimaryCamera(cameraId);
                } else if ("isPerformanceClass".equals(cmdObj.getString("cmdName"))) {
                    doCheckPerformanceClass();
                } else if ("isVicPerformanceClass".equals(cmdObj.getString("cmdName"))) {
                    doCheckVicPerformanceClass();
                } else if ("measureCameraLaunchMs".equals(cmdObj.getString("cmdName"))) {
                    String cameraId = cmdObj.getString("cameraId");
                    doMeasureCameraLaunchMs(cameraId);
                } else if ("measureCamera1080pJpegCaptureMs".equals(cmdObj.getString("cmdName"))) {
                    String cameraId = cmdObj.getString("cameraId");
                    doMeasureCamera1080pJpegCaptureMs(cameraId);
                } else if ("getSupportedVideoQualities".equals(cmdObj.getString("cmdName"))) {
                    String cameraId = cmdObj.getString("cameraId");
                    doGetSupportedVideoQualities(cameraId);
                } else if ("doGetSupportedVideoSizesCapped".equals(cmdObj.getString("cmdName"))) {
                    String cameraId = cmdObj.getString("cameraId");
                    doGetSupportedVideoSizesCapped(cameraId);
                } else if ("getSupportedPreviewSizes".equals(cmdObj.getString("cmdName"))) {
                    boolean filterRecordable = cmdObj.optBoolean("filter_recordable", false);
                    doGetSupportedPreviewSizes(filterRecordable);
                } else if ("getQueryableStreamCombinations".equals(cmdObj.getString("cmdName"))) {
                    doGetQueryableStreamCombinations();
                } else if ("getSupportedExtensions".equals(cmdObj.getString("cmdName"))) {
                    String cameraId = cmdObj.getString("cameraId");
                    doGetSupportedExtensions(cameraId);
                } else if ("getSupportedExtensionSizes".equals(cmdObj.getString("cmdName"))) {
                    String cameraId = cmdObj.getString("cameraId");
                    int extension = cmdObj.getInt("extension");
                    int format = cmdObj.getInt("format");
                    doGetSupportedExtensionSizes(cameraId, extension, format);
                } else if ("getSupportedExtensionPreviewSizes"
                        .equals(cmdObj.getString("cmdName"))) {
                    String cameraId = cmdObj.getString("cameraId");
                    int extension = cmdObj.getInt("extension");
                    doGetSupportedExtensionPreviewSizes(cameraId, extension);
                } else if ("doBasicRecording".equals(cmdObj.getString("cmdName"))) {
                    // Recording commands carry a larger parameter set; optional fields use
                    // opt*() accessors so older host scripts remain compatible.
                    String cameraId = cmdObj.getString("cameraId");
                    int profileId = cmdObj.getInt("profileId");
                    String quality = cmdObj.getString("quality");
                    int recordingDuration = cmdObj.getInt("recordingDuration");
                    int videoStabilizationMode = cmdObj.getInt("videoStabilizationMode");
                    boolean hlg10Enabled = cmdObj.getBoolean("hlg10Enabled");
                    double zoomRatio = cmdObj.optDouble("zoomRatio");
                    int aeTargetFpsMin = cmdObj.optInt("aeTargetFpsMin");
                    int aeTargetFpsMax = cmdObj.optInt("aeTargetFpsMax");
                    int aeAntibandingMode = cmdObj.optInt("aeAntibandingMode", -1);
                    int faceDetectMode = cmdObj.optInt("faceDetectMode");
                    doBasicRecording(cameraId, profileId, quality, recordingDuration,
                            videoStabilizationMode, hlg10Enabled, zoomRatio,
                            aeTargetFpsMin, aeTargetFpsMax, aeAntibandingMode, faceDetectMode);
                } else if ("doStaticPreviewRecording".equals(cmdObj.getString("cmdName"))) {
                    doStaticPreviewRecording(cmdObj);
                } else if ("doDynamicZoomPreviewRecording".equals(
                        cmdObj.getString("cmdName"))) {
                    doDynamicZoomPreviewRecording(cmdObj);
                } else if ("doDynamicMeteringRegionPreviewRecording".equals(
                        cmdObj.getString("cmdName"))) {
                    doDynamicMeteringRegionPreviewRecording(cmdObj);
                } else if ("isHLG10SupportedForProfile".equals(cmdObj.getString("cmdName"))) {
                    String cameraId = cmdObj.getString("cameraId");
                    int profileId = cmdObj.getInt("profileId");
                    doCheckHLG10SupportForProfile(cameraId, profileId);
                } else if ("isHLG10SupportedForSizeAndFps".equals(cmdObj.getString("cmdName"))) {
                    String cameraId = cmdObj.getString("cameraId");
                    String videoSize = cmdObj.getString("videoSize");
                    int maxFps = cmdObj.getInt("maxFps");
                    doCheckHLG10SupportForSizeAndFps(cameraId, videoSize, maxFps);
                } else if ("isP3Supported".equals(cmdObj.getString("cmdName"))) {
                    String cameraId = cmdObj.getString("cameraId");
                    doCheckP3Support(cameraId);
                } else if ("isLandscapeToPortraitEnabled".equals(cmdObj.getString("cmdName"))) {
                    doCheckLandscapeToPortraitEnabled();
                } else if ("doCaptureWithFlash".equals(cmdObj.getString("cmdName"))) {
                    doCaptureWithFlash(cmdObj);
                } else if ("doGetUnavailablePhysicalCameras".equals(cmdObj.getString("cmdName"))) {
                    doGetUnavailablePhysicalCameras();
                } else if ("doCaptureWithExtensions".equals(cmdObj.getString("cmdName"))) {
                    int extension = cmdObj.getInt("extension");
                    doCaptureWithExtensions(cmdObj, extension);
                } else if ("getDisplaySize".equals(cmdObj.getString("cmdName"))) {
                    doGetDisplaySize();
                } else if ("getMaxCamcorderProfileSize".equals(cmdObj.getString("cmdName"))) {
                    String cameraId = cmdObj.getString("cameraId");
                    doGetMaxCamcorderProfileSize(cameraId);
                } else if ("getAvailablePhysicalCameraProperties".equals(cmdObj.getString("cmdName"))) {
                    doGetAvailablePhysicalCameraProperties();
                } else if ("isLowLightBoostAvailable".equals(cmdObj.getString("cmdName"))) {
                    String cameraId = cmdObj.getString("cameraId");
                    int extension = cmdObj.getInt("extension");
                    doCheckLowLightBoostAvailable(cameraId, extension);
                } else if ("doCapturePreviewFrame".equals(cmdObj.getString("cmdName"))) {
                    doCapturePreviewFrame(cmdObj);
                } else if ("doGetDefaultCameraPkgName".equals(cmdObj.getString("cmdName"))) {
                    doGetDefaultCameraPkgName();
                } else if ("doGainMapCheck".equals(cmdObj.getString("cmdName"))) {
                    doGainMapCheck(cmdObj);
                } else if ("isNightModeIndicatorSupported".equals(cmdObj.getString("cmdName"))) {
                    String cameraId = cmdObj.getString("cameraId");
                    doCheckNightModeIndicatorSupported(cameraId);
                } else {
                    throw new ItsException("Unknown command: " + cmd);
                }
                Logt.i(TAG, "Finish processing command" + cmdObj.getString("cmdName"));
            } catch (org.json.JSONException e) {
                Logt.e(TAG, "Invalid command: ", e);
            }
        }

        /**
         * Sends one response to the host: a JSON header line (tag, optional string/object
         * values, and the byte-buffer size if present) followed by the raw buffer bytes.
         * Enqueueing the header and buffer under mSocketWriteEnqueueLock keeps them adjacent
         * in the write queue so the host can pair them.
         *
         * @param tag  response tag the host script keys on
         * @param str  optional string payload
         * @param obj  optional JSON payload
         * @param bbuf optional binary payload (e.g. an image)
         * @throws ItsException on JSON construction failure or queue interruption
         */
        public void sendResponse(String tag, String str, JSONObject obj, ByteBuffer bbuf)
                throws ItsException {
            try {
                JSONObject jsonObj = new JSONObject();
                jsonObj.put("tag", tag);
                if (str != null) {
                    jsonObj.put("strValue", str);
                }
                if (obj != null) {
                    jsonObj.put("objValue", obj);
                }
                if (bbuf != null) {
                    jsonObj.put("bufValueSize", bbuf.capacity());
                }
                ByteBuffer bstr = ByteBuffer.wrap(
                        (jsonObj.toString()+"\n").getBytes(Charset.defaultCharset()));
                synchronized(mSocketWriteEnqueueLock) {
                    if (bstr != null) {
                        mSocketWriteQueue.put(bstr);
                    }
                    if (bbuf != null) {
                        // Record the image size so the writer can release quota after send.
                        mInflightImageSizes.add(bbuf.capacity());
                        mSocketWriteQueue.put(bbuf);
                    }
                }
            } catch
                    (org.json.JSONException e) {
                throw new ItsException("JSON error: ", e);
            } catch (java.lang.InterruptedException e) {
                throw new ItsException("Socket error: ", e);
            }
        }

        /** Convenience overload: string-only response. */
        public void sendResponse(String tag, String str)
                throws ItsException {
            sendResponse(tag, str, null, null);
        }

        /** Convenience overload: JSON-object-only response. */
        public void sendResponse(String tag, JSONObject obj)
                throws ItsException {
            sendResponse(tag, null, obj, null);
        }

        /** Convenience overload: binary-buffer-only response (e.g. a capture image). */
        public void sendResponseCaptureBuffer(String tag, ByteBuffer bbuf)
                throws ItsException {
            sendResponse(tag, null, null, bbuf);
        }

        /**
         * Serializes collected sensor events into per-sensor JSON arrays and sends them as a
         * "sensorEvents" response. Rotation-vector events are converted to orientation angles
         * in degrees; all other sensors report raw x/y/z values.
         *
         * @param events sensor events gathered since startSensorEvents
         * @throws ItsException on JSON construction failure
         */
        public void sendResponse(LinkedList<MySensorEvent> events)
                throws ItsException {
            Logt.i(TAG, "Sending " + events.size() + " sensor events");
            try {
                JSONArray accels = new JSONArray();
                JSONArray mags = new JSONArray();
                JSONArray gyros = new JSONArray();
                JSONArray rvs = new JSONArray();
                for (MySensorEvent event : events) {
                    JSONObject obj = new JSONObject();
                    if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
                        // Convert the rotation vector to azimuth/pitch/roll in degrees.
                        float[] mRotationMatrix = new float[16];
                        float[] orientationVals = new float[3];
                        SensorManager.getRotationMatrixFromVector(mRotationMatrix, event.values);
                        SensorManager.getOrientation(mRotationMatrix, orientationVals);
                        orientationVals[0] = (float) Math.toDegrees(orientationVals[0]);
                        orientationVals[1] = (float) Math.toDegrees(orientationVals[1]);
                        orientationVals[2] = (float) Math.toDegrees(orientationVals[2]);
                        obj.put("time", event.timestamp);
                        obj.put("x", orientationVals[0]);
                        obj.put("y", orientationVals[1]);
                        obj.put("z", orientationVals[2]);
                    } else {
                        obj.put("time", event.timestamp);
                        obj.put("x", event.values[0]);
                        obj.put("y", event.values[1]);
                        obj.put("z", event.values[2]);
                    }
                    if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
                        accels.put(obj);
                    } else if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) {
                        mags.put(obj);
                    } else if (event.sensor.getType() == Sensor.TYPE_GYROSCOPE) {
                        gyros.put(obj);
                    } else if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
                        rvs.put(obj);
                    }
                }
                JSONObject obj = new JSONObject();
                obj.put("accel", accels);
                obj.put("mag", mags);
                obj.put("gyro", gyros);
                obj.put("rv", rvs);
                sendResponse("sensorEvents", null, obj, null);
            } catch (org.json.JSONException e) {
                throw new ItsException("JSON error: ", e);
            }
            Logt.i(TAG, "Sent sensor events");
        }

        /**
         * Queues camera characteristics for serialization on the serializer thread
         * (serialization is slow due to reflection, so it is done off this thread).
         */
        public void sendResponse(CameraCharacteristics props)
                throws ItsException {
            try {
                Object objs[] = new Object[2];
                objs[0] = "cameraProperties";
                objs[1] = props;
                mSerializerQueue.put(objs);
            } catch (InterruptedException e) {
                throw new ItsException("Interrupted: ", e);
            }
        }

        /**
         * Queues per-physical-camera characteristics (keyed by camera id) for serialization
         * and sending as an "availablePhysicalCameraProperties" response.
         *
         * NOTE(review): the {@code tag} parameter is ignored; the tag is hard-coded to
         * "availablePhysicalCameraProperties" below.
         */
        public void sendResponse(String tag, HashMap<String, CameraCharacteristics> props)
                throws ItsException {
            try {
                JSONArray jsonSurfaces = new JSONArray();
                for (String s : props.keySet()) {
                    JSONObject jsonSurface = new JSONObject();
                    jsonSurface.put(s, ItsSerializer.serialize(props.get(s)));
                    jsonSurfaces.put(jsonSurface);
                }
                Object objs[] = new Object[2];
                objs[0] = "availablePhysicalCameraProperties";
                objs[1] = jsonSurfaces;
                mSerializerQueue.put(objs);
            } catch (Exception e) {
                throw new ItsException("Interrupted: ", e);
            }
        }

        /**
         * Serializes a completed video recording (output path, quality, size, frame rate if
         * valid, free-form metadata, and per-frame capture results) and sends it as a
         * "recordingResponse".
         */
        public void sendVideoRecordingObject(VideoRecordingObject obj)
                throws ItsException {
            try {
                JSONObject
                        videoJson = new JSONObject();
                videoJson.put("recordedOutputPath", obj.recordedOutputPath);
                videoJson.put("quality", obj.quality);
                if (obj.isFrameRateValid()) {
                    videoJson.put("videoFrameRate", obj.videoFrameRate);
                }
                videoJson.put("videoSize", obj.videoSize);
                JSONObject metadata = new JSONObject();
                for (Map.Entry<String, String> entry : obj.metadata.entrySet()) {
                    metadata.put(entry.getKey(), entry.getValue());
                }
                videoJson.put("metadata", metadata);
                JSONArray captureMetadata = new JSONArray();
                for (RecordingResult r : obj.perFrameCaptureResults) {
                    captureMetadata.put(ItsSerializer.serialize(r));
                }
                videoJson.put("captureMetadata", captureMetadata);
                sendResponse("recordingResponse", null, videoJson, null);
            } catch (org.json.JSONException e) {
                throw new ItsException("JSON error: ", e);
            }
        }

        /**
         * Builds the per-surface output descriptions (size and protocol format string) for
         * each ImageReader, attaches per-physical-camera capture results, and queues the
         * whole bundle for serialization as a "captureResults" response.
         *
         * The format strings written here (e.g. "rawStats", "dng", "yuv") are protocol
         * constants the host-side ITS scripts key on.
         *
         * @param request the capture request (currently unused here but part of the contract)
         * @param result  the total capture result, including physical-camera results
         * @param readers one ImageReader per configured output surface
         * @throws ItsException on JSON failure, interruption, or an unsupported format
         */
        public void sendResponseCaptureResult(
                CaptureRequest request, TotalCaptureResult result, ImageReader[] readers)
                throws ItsException {
            try {
                JSONArray jsonSurfaces = new JSONArray();
                for (int i = 0; i < readers.length; i++) {
                    JSONObject jsonSurface = new JSONObject();
                    jsonSurface.put("width", readers[i].getWidth());
                    jsonSurface.put("height", readers[i].getHeight());

                    // Use the physical sub-camera's characteristics when this stream is
                    // mapped to a physical camera.
                    CameraCharacteristics cameraCharacteristics = mCameraCharacteristics;
                    String physicalCameraId = mPhysicalStreamMap.get(i);
                    if (physicalCameraId != null && !physicalCameraId.isEmpty()) {
                        cameraCharacteristics = mPhysicalCameraChars.get(physicalCameraId);
                    }

                    int format = readers[i].getImageFormat();
                    if (format == ImageFormat.RAW_SENSOR) {
                        if (mCaptureRawIsStats) {
                            // Stats output dimensions are the active array divided by the
                            // stats grid cell size.
                            Rect activeArrayCropRegion = ItsUtils.getActiveArrayCropRegion(
                                    cameraCharacteristics, false);
                            int aaw = activeArrayCropRegion.width();
                            int aah = activeArrayCropRegion.height();
                            jsonSurface.put("format", "rawStats");
                            jsonSurface.put("width", aaw / mCaptureStatsGridWidth);
                            jsonSurface.put("height", aah / mCaptureStatsGridHeight);
                        } else if (mCaptureRawIsQuadBayerStats) {
                            Rect activeArrayCropRegion = ItsUtils.getActiveArrayCropRegion(
                                    cameraCharacteristics, true);
                            int aaw = activeArrayCropRegion.width();
                            int aah = activeArrayCropRegion.height();
                            jsonSurface.put("format", "rawQuadBayerStats");
                            jsonSurface.put("width", aaw / mCaptureStatsGridWidth);
                            jsonSurface.put("height", aah / mCaptureStatsGridHeight);
                        } else if (mCaptureRawIsQuadBayer) {
                            jsonSurface.put("format", "rawQuadBayer");
                        } else if (mCaptureRawIsDng) {
                            jsonSurface.put("format", "dng");
                        } else {
                            jsonSurface.put("format", "raw");
                        }
                    } else if (format == ImageFormat.RAW10) {
                        if (mCaptureRawIsStats) {
                            Rect activeArrayCropRegion = ItsUtils.getActiveArrayCropRegion(
                                    cameraCharacteristics, false);
                            int aaw = activeArrayCropRegion.width();
                            int aah = activeArrayCropRegion.height();
                            jsonSurface.put("format", "raw10Stats");
                            jsonSurface.put("width", aaw / mCaptureStatsGridWidth);
                            jsonSurface.put("height", aah / mCaptureStatsGridHeight);
                        } else if (mCaptureRawIsQuadBayerStats) {
                            Rect activeArrayCropRegion = ItsUtils.getActiveArrayCropRegion(
                                    cameraCharacteristics, true);
                            int aaw = activeArrayCropRegion.width();
                            int aah = activeArrayCropRegion.height();
                            jsonSurface.put("format", "raw10QuadBayerStats");
                            jsonSurface.put("width", aaw / mCaptureStatsGridWidth);
                            jsonSurface.put("height", aah / mCaptureStatsGridHeight);
                        } else if (mCaptureRawIsQuadBayer) {
                            jsonSurface.put("format", "raw10QuadBayer");
                        } else {
                            jsonSurface.put("format", "raw10");
                        }
                    } else if (format == ImageFormat.RAW12) {
                        jsonSurface.put("format", "raw12");
                    } else if (format == ImageFormat.JPEG) {
                        jsonSurface.put("format", "jpeg");
                    } else if (format == ImageFormat.JPEG_R) {
                        jsonSurface.put("format", JPEG_R_FMT);
                    } else if (format == ImageFormat.HEIC_ULTRAHDR) {
                        jsonSurface.put("format", HEIC_ULTRAHDR_FMT);
                    } else if (format == ImageFormat.PRIVATE) {
                        jsonSurface.put("format", "priv");
                    } else if (format == ImageFormat.YUV_420_888) {
                        jsonSurface.put("format", "yuv");
                    } else if (format == ImageFormat.Y8) {
                        jsonSurface.put("format", "y8");
                    } else {
                        throw new ItsException("Invalid format");
                    }
                    jsonSurfaces.put(jsonSurface);
                }

                Map<String, CaptureResult> physicalMetadata =
                        result.getPhysicalCameraResults();
                JSONArray jsonPhysicalMetadata = new JSONArray();
                for (Map.Entry<String, CaptureResult> pair : physicalMetadata.entrySet()) {
                    JSONObject jsonOneMetadata = new JSONObject();
                    jsonOneMetadata.put(pair.getKey(), ItsSerializer.serialize(pair.getValue()));
                    jsonPhysicalMetadata.put(jsonOneMetadata);
                }
                // Array slots are positional: the serializer thread reads surfaces and
                // physical metadata by SERIALIZER_*_ID index.
                Object objs[] = new Object[4];
                objs[0] = "captureResults";
                objs[1] = result;
                objs[SERIALIZER_SURFACES_ID] = jsonSurfaces;
                objs[SERIALIZER_PHYSICAL_METADATA_ID] = jsonPhysicalMetadata;
                mSerializerQueue.put(objs);
            } catch (org.json.JSONException e) {
                throw new ItsException("JSON error: ", e);
            } catch (InterruptedException e) {
                throw new ItsException("Interrupted: ", e);
            }
        }
    }

    /**
     * Returns a listener that forwards each newly-available image (with the physical camera
     * id of its stream, if any) to the given capture callback, closing the image afterwards.
     */
    public ImageReader.OnImageAvailableListener
            createAvailableListener(final CaptureCallback listener) {
        return new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image i = null;
                try {
                    i = reader.acquireNextImage();
                    Logt.i(TAG, "Image timestamp: " + i.getTimestamp());
                    String physicalCameraId = new String();
                    for (int idx = 0; idx <
                            mOutputImageReaders.length; idx++) {
                        // Map the reader back to its stream index to find the physical id.
                        if (mOutputImageReaders[idx] == reader) {
                            physicalCameraId = mPhysicalStreamMap.get(idx);
                        }
                    }
                    listener.onCaptureAvailable(i, physicalCameraId);
                } finally {
                    if (i != null) {
                        i.close();
                    }
                }
            }
        };
    }

    /**
     * Like createAvailableListener, but for extension captures: additionally decrements
     * mCountCallbacksRemaining and notifies waiters so extension capture loops can detect
     * completion.
     */
    public ImageReader.OnImageAvailableListener
            createExtensionAvailableListener(final CaptureCallback listener) {
        return new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image i = null;
                try {
                    i = reader.acquireNextImage();
                    String physicalCameraId = new String();
                    for (int idx = 0; idx < mOutputImageReaders.length; idx++) {
                        if (mOutputImageReaders[idx] == reader) {
                            physicalCameraId = mPhysicalStreamMap.get(idx);
                            break;
                        }
                    }
                    listener.onCaptureAvailable(i, physicalCameraId);
                    synchronized(mCountCallbacksRemaining) {
                        mCountCallbacksRemaining.decrementAndGet();
                        mCountCallbacksRemaining.notify();
                    }
                } finally {
                    if (i != null) {
                        i.close();
                    }
                }
            }
        };
    }

    /** Returns a listener that discards every image, just draining the reader's queue. */
    private ImageReader.OnImageAvailableListener
            createAvailableListenerDropper() {
        return new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image i = reader.acquireNextImage();
                if (i != null) {
                    i.close();
                }
            }
        };
    }

    /** Clears buffered sensor events and starts collecting new ones. */
    private void doStartSensorEvents() throws ItsException {
        synchronized(mEventLock) {
            mEvents.clear();
            mEventsEnabled = true;
        }
        mSocketRunnableObj.sendResponse("sensorEventsStarted", "");
    }

    /**
     * Reports which sensors (accelerometer, magnetometer, gyroscope, rotation vector,
     * vibrator) exist on this device as a "sensorExistence" response.
     */
    private void doCheckSensorExistence() throws ItsException {
        try {
            JSONObject obj = new JSONObject();
            obj.put("accel", mAccelSensor != null);
            obj.put("mag", mMagSensor != null);
            obj.put("gyro", mGyroSensor != null);
            obj.put("rv", mRotationVector != null);
            obj.put("vibrator", mVibrator.hasVibrator());
            mSocketRunnableObj.sendResponse("sensorExistence", null, obj, null);
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error: ", e);
        }
    }

    /** Sends buffered sensor events to the host, then clears and disables collection. */
    private void doGetSensorEvents() throws ItsException {
        synchronized(mEventLock) {
            mSocketRunnableObj.sendResponse(mEvents);
            mEvents.clear();
            mEventsEnabled = false;
        }
    }

    /** Sends the currently-open camera's characteristics. */
    private void doGetProps() throws ItsException {
        mSocketRunnableObj.sendResponse(mCameraCharacteristics);
    }

    /**
     * Sends characteristics for an arbitrary camera id (the camera need not be opened),
     * optionally applying the overrideToPortrait flag from the command.
     *
     * @param params JSON command containing "cameraId" and optional "overrideToPortrait"
     * @throws ItsException on missing/invalid id, JSON error, or camera access failure
     */
    private void doGetPropsById(JSONObject params) throws ItsException {
        String[] devices;
        try {
            // Intentionally not using ItsUtils.getItsCompatibleCameraIds here so it's possible to
            // write some simple script to query camera characteristics even for devices exempted
            // from ITS today.
            devices = mCameraManager.getCameraIdList();
            if (devices == null || devices.length == 0) {
                throw new ItsException("No camera devices");
            }
        } catch (CameraAccessException e) {
            throw new ItsException("Failed to get device ID list", e);
        }

        try {
            String cameraId = params.getString("cameraId");
            CameraCharacteristics characteristics = null;
            if (params.has("overrideToPortrait")) {
                characteristics = mCameraManager.getCameraCharacteristics(cameraId,
                        params.getBoolean("overrideToPortrait"));
            } else {
                characteristics = mCameraManager.getCameraCharacteristics(cameraId);
            }
            mSocketRunnableObj.sendResponse(characteristics);
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error: ", e);
        } catch (IllegalArgumentException e) {
            throw new ItsException("Illegal argument error:", e);
        } catch (CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        }
    }

    /** Sends the cached per-physical-camera characteristics gathered at camera open. */
    private void doGetAvailablePhysicalCameraProperties() throws ItsException {
mSocketRunnableObj.sendResponse("availablePhysicalCameraProperties", mPhysicalCameraChars); 1534 } 1535 1536 1537 private void doCheckLowLightBoostAvailable(String cameraId, int extension) throws ItsException { 1538 try { 1539 mSocketRunnableObj.sendResponse("isLowLightBoostAvailable", 1540 isLowLightBoostAvailable(cameraId, extension) ? "true" : "false"); 1541 } catch (CameraAccessException e) { 1542 throw new ItsException("Failed to check low light boost supported", e); 1543 } 1544 } 1545 1546 private void doCheckNightModeIndicatorSupported(String cameraId) throws ItsException { 1547 try { 1548 mSocketRunnableObj.sendResponse("isNightModeIndicatorSupported", 1549 isNightModeIndicatorSupported(cameraId) ? "true" : "false"); 1550 } catch (CameraAccessException e) { 1551 throw new ItsException("Failed to check night mode indicator supported", e); 1552 } 1553 } 1554 1555 /** 1556 * Checks if low light boost AE mode is supported. 1557 * 1558 * This method queries for available AE modes and checks if low light boost is an available AE 1559 * mode. The method queries Camera2 if {@code extension} is -1. Otherwise, the method will 1560 * query the Camera Extension as defined by {@code extension}. If the extension is not 1561 * supported then false is returned. 
1562 **/ 1563 private boolean isLowLightBoostAvailable(String cameraId, int extension) 1564 throws CameraAccessException { 1565 if (!ItsUtils.isAtLeastV()) { 1566 return false; 1567 } 1568 boolean isLowLightBoostSupported = false; 1569 int[] aeModes = null; 1570 if (extension == -1) { 1571 // Get available AE modes for Camera2 1572 CameraCharacteristics c = mCameraManager.getCameraCharacteristics(cameraId); 1573 aeModes = c.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES); 1574 } else { 1575 // Get available AE modes for Camera Extension mode if the extension is supported 1576 CameraExtensionCharacteristics c = 1577 mCameraManager.getCameraExtensionCharacteristics(cameraId); 1578 List<Integer> supportedExtensions = c.getSupportedExtensions(); 1579 if (supportedExtensions.contains(extension)) { 1580 aeModes = c.get(extension, CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES); 1581 } 1582 } 1583 return isLowLightBoostSupported = aeModes == null ? false : Ints.asList(aeModes) 1584 .contains(CameraMetadata.CONTROL_AE_MODE_ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY); 1585 } 1586 1587 /** 1588 * Checks if night mode indicator is supported. 1589 * 1590 * This method queries Camera2 and Camera Extension for the night mode indicator key and checks 1591 * if night mode extension is supported. 
     **/
    private boolean isNightModeIndicatorSupported(String cameraId) throws CameraAccessException {
        CameraExtensionCharacteristics extensionCharacteristics =
                mCameraManager.getCameraExtensionCharacteristics(cameraId);
        // The indicator only applies when the NIGHT extension itself is available.
        if (!extensionCharacteristics.getSupportedExtensions()
                .contains(CameraExtensionCharacteristics.EXTENSION_NIGHT)) {
            return false;
        }

        boolean isExtensionNightModeIndicatorSupported = extensionCharacteristics
                .getAvailableCaptureResultKeys(CameraExtensionCharacteristics.EXTENSION_NIGHT)
                .contains(CaptureResult.EXTENSION_NIGHT_MODE_INDICATOR);
        boolean isNightModeIndicatorSupported = mCameraCharacteristics
                .getAvailableCaptureResultKeys()
                .contains(CaptureResult.EXTENSION_NIGHT_MODE_INDICATOR);

        // Both the extension session and the regular session must report the key.
        return isExtensionNightModeIndicatorSupported && isNightModeIndicatorSupported;
    }

    /**
     * Drains {@code queue} (until a poll times out after AVAILABILITY_TIMEOUT_MS) and returns the
     * physical camera IDs reported unavailable for the given logical {@code cameraId}.
     */
    private Set<String> getUnavailablePhysicalCameras(
            LinkedBlockingQueue<Pair<String, String>> queue, String cameraId) throws Exception {
        Set<String> unavailablePhysicalCameras = new HashSet<String>();
        while (true) {
            Pair<String, String> unavailableIdCombo = queue.poll(
                    AVAILABILITY_TIMEOUT_MS, java.util.concurrent.TimeUnit.MILLISECONDS);
            if (unavailableIdCombo == null) {
                // No more entries in the queue. Break out of the loop and return.
                break;
            }
            if (cameraId.equals(unavailableIdCombo.first)) {
                unavailablePhysicalCameras.add(unavailableIdCombo.second);
            }
        }
        return unavailablePhysicalCameras;
    }

    /** Sends the ITS-compatible camera ID combos plus the primary rear/front IDs to the host. */
    private void doGetCameraIds() throws ItsException {
        if (mItsCameraIdList == null) {
            mItsCameraIdList = ItsUtils.getItsCompatibleCameraIds(mCameraManager);
        }
        if (mItsCameraIdList.mCameraIdCombos.size() == 0) {
            throw new ItsException("No camera devices");
        }

        try {
            JSONObject obj = new JSONObject();
            JSONArray array = new JSONArray();
            for (String id : mItsCameraIdList.mCameraIdCombos) {
                array.put(id);
            }
            obj.put("cameraIdArray", array);
            obj.put("primaryRearCameraId", mItsCameraIdList.mPrimaryRearCameraId);
            obj.put("primaryFrontCameraId", mItsCameraIdList.mPrimaryFrontCameraId);
            mSocketRunnableObj.sendResponse("cameraIds", obj);
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error: ", e);
        }
    }

    /** Executor adapter that runs every command on the wrapped Handler's thread. */
    private static class HandlerExecutor implements Executor {
        private final Handler mHandler;

        public HandlerExecutor(Handler handler) {
            mHandler = handler;
        }

        @Override
        public void execute(Runnable runCmd) {
            mHandler.post(runCmd);
        }
    }

    /**
     * Builds a SessionConfiguration from the output specs in {@code params}, preparing image
     * readers as a side effect; optional "android.settings" are attached as session parameters.
     */
    private SessionConfiguration getSessionConfiguration(JSONObject params)
            throws ItsException {
        JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
        boolean has10bitOutput = prepareImageReadersWithOutputSpecs(jsonOutputSpecs,
                /*inputSize*/null, /*inputFormat*/0, /*maxInputBuffers*/0,
                /*backgroundRequest*/false, /*reuseSession*/ false);
        int numSurfaces = mOutputImageReaders.length;
        List<OutputConfiguration> outputConfigs = new ArrayList<>(numSurfaces);
        for (int i = 0; i < numSurfaces; i++) {
            OutputConfiguration config = new OutputConfiguration(
                    mOutputImageReaders[i].getSurface());
            if
 (mPhysicalStreamMap.get(i) != null) {
                config.setPhysicalCameraId(mPhysicalStreamMap.get(i));
            }
            if (mStreamUseCaseMap.get(i) != null) {
                config.setStreamUseCase(mStreamUseCaseMap.get(i));
            }
            boolean hlg10Compatible =
                    isHlg10Compatible(mOutputImageReaders[i].getImageFormat());
            if (has10bitOutput && hlg10Compatible) {
                config.setDynamicRangeProfile(DynamicRangeProfiles.HLG10);
            }
            outputConfigs.add(config);
        }

        // The state callback is a no-op: this configuration is only used for support queries,
        // never to actually open a session here.
        SessionConfiguration sessionConfig = new SessionConfiguration(
                SessionConfiguration.SESSION_REGULAR, outputConfigs,
                new HandlerExecutor(mCameraHandler),
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(CameraCaptureSession session) {
                    }
                    @Override
                    public void onReady(CameraCaptureSession session) {
                    }
                    @Override
                    public void onConfigureFailed(CameraCaptureSession session) {
                    }
                    @Override
                    public void onClosed(CameraCaptureSession session) {
                    }
                });

        CaptureRequest.Builder templateReq = null;
        if (params.has(SETTINGS_KEY)) {
            try {
                CaptureRequest.Builder defaultReq = mCamera.createCaptureRequest(
                        CameraDevice.TEMPLATE_STILL_CAPTURE);
                JSONObject settingsObj = params.getJSONObject(SETTINGS_KEY);
                templateReq = ItsSerializer.deserialize(defaultReq, settingsObj);
            } catch (CameraAccessException e) {
                throw new ItsException("Failed to create capture request", e);
            } catch (org.json.JSONException e) {
                throw new ItsException("JSON error: ", e);
            }
        }

        if (templateReq != null) {
            sessionConfig.setSessionParameters(templateReq.build());
        }
        return sessionConfig;
    }

    /**
     * Responds whether the stream combination described by {@code params} is supported; uses the
     * V-specific CameraDeviceSetup path when session parameters are present.
     */
    private void doCheckStreamCombination(JSONObject params) throws ItsException {
        try {
            String returnString;
            SessionConfiguration sessionConfig = getSessionConfiguration(params);

            if (sessionConfig.getSessionParameters() == null) {
                returnString = mCamera.isSessionConfigurationSupported(sessionConfig)
                        ? "supportedCombination" : "unsupportedCombination";
            } else if (ItsUtils.isAtLeastV()) {
                returnString = doCheckStreamCombinationV(sessionConfig);
            } else {
                Log.i(TAG,
                        "Querying session support with parameters on pre-V device "
                        + "is not supported.");
                returnString = "unsupportedOperation";
            }
            mSocketRunnableObj.sendResponse("streamCombinationSupport", returnString);

        } catch (UnsupportedOperationException e) {
            mSocketRunnableObj.sendResponse("streamCombinationSupport", "unsupportedOperation");
        } catch (IllegalArgumentException | CameraAccessException e) {
            throw new ItsException("Error checking stream combination", e);
        }
    }

    /** Android V+ support query via CameraDeviceSetup (handles session parameters). */
    @RequiresApi(Build.VERSION_CODES.VANILLA_ICE_CREAM)
    private String doCheckStreamCombinationV(SessionConfiguration sessionConfig)
            throws CameraAccessException {
        String returnString;
        if (!mCameraManager.isCameraDeviceSetupSupported(mCamera.getId())) {
            Log.i(TAG,
                    "Attempting to query session support with parameters, but "
                    + "CameraDeviceSetup is not supported.");
            returnString = "unsupportedOperation";
        } else {
            CameraDevice.CameraDeviceSetup cameraDeviceSetup =
                    mCameraManager.getCameraDeviceSetup(mCamera.getId());
            boolean supported = cameraDeviceSetup.isSessionConfigurationSupported(
                    sessionConfig);
            returnString = supported ? "supportedCombination" : "unsupportedCombination";
        }
        return returnString;
    }

    /** Dispatches a session-characteristics query; only valid on Android V and later. */
    private void doGetSessionProps(JSONObject params) throws ItsException {
        if (ItsUtils.isAtLeastV()) {
            doGetSessionPropsV(params);
        } else {
            throw new ItsException("Attempting to query session characteristics on "
                    + "OSes older than Android V.");
        }
    }

    /** Sends the session characteristics for the configuration described by {@code params}. */
    @RequiresApi(Build.VERSION_CODES.VANILLA_ICE_CREAM)
    private void doGetSessionPropsV(JSONObject params) throws ItsException {
        try {
            if (!mCameraManager.isCameraDeviceSetupSupported(mCamera.getId())) {
                throw new ItsException("Attempting to query session characteristics, but "
                        + "CameraDeviceSetup is not supported.");
            }

            SessionConfiguration sessionConfig = getSessionConfiguration(params);

            CameraDevice.CameraDeviceSetup cameraDeviceSetup =
                    mCameraManager.getCameraDeviceSetup(mCamera.getId());
            CameraCharacteristics sessionProps = cameraDeviceSetup.getSessionCharacteristics(
                    sessionConfig);

            mSocketRunnableObj.sendResponse(sessionProps);
        } catch (android.hardware.camera2.CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        }
    }

    /** Responds "true"/"false" for whether the camera privacy (sensor toggle) is supported. */
    private void doCheckCameraPrivacyModeSupport() throws ItsException {
        boolean hasPrivacySupport = mSensorPrivacyManager
                .supportsSensorToggle(SensorPrivacyManager.Sensors.CAMERA);
        mSocketRunnableObj.sendResponse("cameraPrivacyModeSupport",
                hasPrivacySupport ?
"true" : "false"); 1806 } 1807 1808 private void doGetUnavailablePhysicalCameras() throws ItsException { 1809 try { 1810 JSONArray cameras = new JSONArray(); 1811 JSONObject jsonObj = new JSONObject(); 1812 for (String p : mUnavailablePhysicalCameras) { 1813 cameras.put(p); 1814 } 1815 jsonObj.put("unavailablePhysicalCamerasArray", cameras); 1816 Log.i(TAG, "unavailablePhysicalCameras : " + 1817 List.of(mUnavailablePhysicalCameras.toString())); 1818 mSocketRunnableObj.sendResponse("unavailablePhysicalCameras", null, jsonObj, null); 1819 } catch (org.json.JSONException e) { 1820 throw new ItsException("JSON error: ", e); 1821 } 1822 } 1823 1824 private void doGetDisplaySize() throws ItsException { 1825 Size displaySize = getDisplaySize(); 1826 mSocketRunnableObj.sendResponse("displaySize", displaySize.toString()); 1827 } 1828 1829 private Size getDisplaySize() throws ItsException { 1830 WindowManager windowManager = getSystemService(WindowManager.class); 1831 if (windowManager == null) { 1832 throw new ItsException("No window manager."); 1833 } 1834 WindowMetrics metrics = windowManager.getCurrentWindowMetrics(); 1835 if (metrics == null) { 1836 throw new ItsException("No current window metrics in window manager."); 1837 } 1838 Rect windowBounds = metrics.getBounds(); 1839 1840 int width = windowBounds.width(); 1841 int height = windowBounds.height(); 1842 if (height > width) { 1843 height = width; 1844 width = windowBounds.height(); 1845 } 1846 1847 Size displaySize = new Size(width, height); 1848 return displaySize; 1849 } 1850 1851 private void doGetMaxCamcorderProfileSize(String cameraId) throws ItsException { 1852 validateCameraId(cameraId); 1853 1854 int cameraDeviceId = Integer.parseInt(cameraId); 1855 int maxArea = -1; 1856 Size maxProfileSize = new Size(0, 0); 1857 for (int profileId : CAMCORDER_PROFILE_QUALITIES_MAP.keySet()) { 1858 if (CamcorderProfile.hasProfile(cameraDeviceId, profileId)) { 1859 CamcorderProfile profile = 
CamcorderProfile.get(cameraDeviceId, profileId); 1860 if (profile == null) { 1861 throw new ItsException("Invalid camcorder profile for id " + profileId); 1862 } 1863 1864 int area = profile.videoFrameWidth * profile.videoFrameHeight; 1865 if (area > maxArea) { 1866 maxProfileSize = new Size(profile.videoFrameWidth, profile.videoFrameHeight); 1867 maxArea = area; 1868 } 1869 } 1870 } 1871 mSocketRunnableObj.sendResponse("maxCamcorderProfileSize", maxProfileSize.toString()); 1872 } 1873 1874 private void doCheckPrimaryCamera(String cameraId) throws ItsException { 1875 validateCameraId(cameraId); 1876 1877 boolean isPrimaryCamera = false; 1878 try { 1879 CameraCharacteristics c = mCameraManager.getCameraCharacteristics(cameraId); 1880 Integer cameraFacing = c.get(CameraCharacteristics.LENS_FACING); 1881 for (String id : mItsCameraIdList.mCameraIds) { 1882 c = mCameraManager.getCameraCharacteristics(id); 1883 Integer facing = c.get(CameraCharacteristics.LENS_FACING); 1884 if (cameraFacing.equals(facing)) { 1885 if (cameraId.equals(id)) { 1886 isPrimaryCamera = true; 1887 } else { 1888 isPrimaryCamera = false; 1889 } 1890 break; 1891 } 1892 } 1893 } catch (CameraAccessException e) { 1894 throw new ItsException("Failed to get camera characteristics", e); 1895 } 1896 1897 mSocketRunnableObj.sendResponse("primaryCamera", 1898 isPrimaryCamera ? 
 "true" : "false");
    }

    /** Checks HLG10 recording support for the video size/bitrate/fps of a camcorder profile. */
    private void doCheckHLG10SupportForProfile(String cameraId, int profileId) throws ItsException {
        validateCameraId(cameraId);

        int cameraDeviceId = Integer.parseInt(cameraId);
        CamcorderProfile camcorderProfile = getCamcorderProfile(cameraDeviceId, profileId);
        assert (camcorderProfile != null);

        Size videoSize = new Size(camcorderProfile.videoFrameWidth,
                camcorderProfile.videoFrameHeight);
        doCheckHLG10SupportInternal(cameraId, videoSize, camcorderProfile.videoBitRate,
                camcorderProfile.videoFrameRate);
    }

    /** Checks HLG10 recording support for an explicit video size and max frame rate. */
    private void doCheckHLG10SupportForSizeAndFps(String cameraId, String videoSizeStr, int maxFps)
            throws ItsException {
        validateCameraId(cameraId);

        Size videoSize = Size.parseSize(videoSizeStr);
        int cameraIdInt = Integer.parseInt(cameraId);
        int videoBitRate = ItsUtils.calculateBitrate(cameraIdInt, videoSize, maxFps);
        doCheckHLG10SupportInternal(cameraId, videoSize, videoBitRate, maxFps);
    }

    /**
     * Responds "true" only when both the camera advertises 10-bit dynamic range output and a
     * codec exists for the corresponding HLG10 media format.
     */
    private void doCheckHLG10SupportInternal(String cameraId, Size videoSize,
            int videoBitRate, int maxFps) throws ItsException {
        boolean cameraHLG10OutputSupported = false;
        try {
            CameraCharacteristics c = mCameraManager.getCameraCharacteristics(cameraId);
            int[] caps = c.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
            cameraHLG10OutputSupported = IntStream.of(caps).anyMatch(x -> x ==
                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT);
        } catch (CameraAccessException e) {
            throw new ItsException("Failed to get camera characteristics", e);
        }

        MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        MediaFormat format = ItsUtils.initializeHLG10Format(videoSize, videoBitRate,
                maxFps);
        boolean codecSupported = (list.findEncoderForFormat(format) != null);
        Log.v(TAG, "codecSupported: " + codecSupported + ", cameraHLG10OutputSupported: "
                + cameraHLG10OutputSupported);

        mSocketRunnableObj.sendResponse("hlg10Response",
                codecSupported && cameraHLG10OutputSupported ? "true" : "false");
    }

    /** Responds "true"/"false" for Display P3 color space output support on the camera. */
    private void doCheckP3Support(String cameraId) throws ItsException {
        boolean cameraP3OutputSupported = false;
        try {
            CameraCharacteristics c = mCameraManager.getCameraCharacteristics(cameraId);
            int[] caps = c.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
            boolean colorSpaceProfilesSupported = IntStream.of(caps).anyMatch(x -> x
                    == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES);
            if (colorSpaceProfilesSupported) {
                ColorSpaceProfiles colorSpaceProfiles = c.get(
                        CameraCharacteristics.REQUEST_AVAILABLE_COLOR_SPACE_PROFILES);
                Set<ColorSpace.Named> colorSpaces =
                        colorSpaceProfiles.getSupportedColorSpaces(ImageFormat.UNKNOWN);
                if (colorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
                    cameraP3OutputSupported = true;
                }
            }
        } catch (CameraAccessException e) {
            throw new ItsException("Failed to get camera characteristics", e);
        }

        mSocketRunnableObj.sendResponse("p3Response", cameraP3OutputSupported ? "true" : "false");
    }

    /** Reports whether the landscape-to-portrait system property is enabled. */
    private void doCheckLandscapeToPortraitEnabled() throws ItsException {
        boolean enabled = SystemProperties.getBoolean(CameraManager.LANDSCAPE_TO_PORTRAIT_PROP,
                false);
        mSocketRunnableObj.sendResponse("landscapeToPortraitEnabledResponse",
                enabled ? "true" : "false");
    }

    /** Reports whether the device declares media performance class R or newer. */
    private void doCheckPerformanceClass() throws ItsException {
        boolean isPerfClass = (Build.VERSION.MEDIA_PERFORMANCE_CLASS >= PERFORMANCE_CLASS_R);

        mSocketRunnableObj.sendResponse("performanceClass",
                isPerfClass ? "true" : "false");
    }

    /** Reports whether the device declares media performance class V or newer. */
    private void doCheckVicPerformanceClass() throws ItsException {
        boolean isPerfClass = (Build.VERSION.MEDIA_PERFORMANCE_CLASS
                >= Build.VERSION_CODES.VANILLA_ICE_CREAM);

        mSocketRunnableObj.sendResponse("vicPerformanceClass",
                isPerfClass ? "true" : "false");
    }

    /**
     * Runs one camera performance JUnit test via instrumentation and returns the value of the
     * single-valued metric named {@code metricName}; throws if the test fails or the metric is
     * not found.
     */
    private double invokeCameraPerformanceTest(Class testClass, String testName,
            String cameraId, String metricName) throws ItsException {
        mResults.clear();
        mCameraInstrumentation = new CameraTestInstrumentation();
        MetricListener metricListener = new MetricListener() {
            @Override
            public void onResultMetric(Metric metric) {
                mResults.add(metric);
            }
        };
        mCameraInstrumentation.initialize(this, metricListener);

        Bundle bundle = new Bundle();
        bundle.putString("camera-id", cameraId);
        bundle.putString("perf-measure", "on");
        bundle.putString("perf-class-test", "on");
        bundle.putByte("has-activity", (byte) 1);
        InstrumentationRegistry.registerInstance(mCameraInstrumentation, bundle);

        JUnitCore testRunner = new JUnitCore();
        Log.v(TAG, String.format("Execute Test: %s#%s", testClass.getSimpleName(), testName));
        Request request = Request.method(testClass, testName);
        Result runResult = testRunner.run(request);
        if (!runResult.wasSuccessful()) {
            throw new ItsException("Camera PerformanceTest " + testClass.getSimpleName() +
                    "#" + testName + " failed");
        }

        for (Metric m : mResults) {
            if (m.getMessage().equals(metricName) && m.getValues().length == 1) {
                return m.getValues()[0];
            }
        }

        throw new ItsException("Failed to look up " + metricName +
                " in Camera PerformanceTest result!");
    }

    /** Measures and reports the average camera launch time in milliseconds. */
    private void doMeasureCameraLaunchMs(String cameraId) throws ItsException {
        double launchMs = invokeCameraPerformanceTest(PerformanceTest.class,
                "testCameraLaunch", cameraId,
 "camera_launch_average_time_for_all_cameras");
        mSocketRunnableObj.sendResponse("cameraLaunchMs", Double.toString(launchMs));
    }

    /** Measures and reports the average 1080p JPEG capture latency in milliseconds. */
    private void doMeasureCamera1080pJpegCaptureMs(String cameraId) throws ItsException {
        double jpegCaptureMs = invokeCameraPerformanceTest(PerformanceTest.class,
                "testSingleCapture", cameraId,
                "camera_capture_average_latency_for_all_cameras_jpeg");
        mSocketRunnableObj.sendResponse("camera1080pJpegCaptureMs", Double.toString(jpegCaptureMs));
    }

    /** Picks the HardwareBuffer usage flags for an ImageReader of the given format. */
    private static long getReaderUsage(int format, boolean has10bitOutput, int inputFormat) {
        // Private image format camera readers will default to ZSL usage unless
        // explicitly configured to use a common consumer such as display.
        // We don't support the ZSL use case for the 10-bit use case, or if the input format
        // is not PRIVATE.
        boolean notForZslReprocess = (inputFormat != format);
        return (format == ImageFormat.PRIVATE && (has10bitOutput || notForZslReprocess))
                ? HardwareBuffer.USAGE_COMPOSER_OVERLAY : HardwareBuffer.USAGE_CPU_READ_OFTEN;
    }

    /**
     * Builds one OutputConfiguration per prepared output image reader, applying physical camera
     * IDs, stream use cases, color spaces from {@code jsonOutputSpecs}, and HLG10 when requested.
     */
    private List<OutputConfiguration> getCaptureOutputConfigurations(
            JSONArray jsonOutputSpecs, boolean is10bitOutputPresent)
            throws org.json.JSONException {
        int numSurfaces = mOutputImageReaders.length;
        List<OutputConfiguration> outputConfigs =
                new ArrayList<OutputConfiguration>(numSurfaces);
        for (int i = 0; i < numSurfaces; i++) {
            OutputConfiguration config = new OutputConfiguration(
                    mOutputImageReaders[i].getSurface());
            if (mPhysicalStreamMap.get(i) != null &&
                    !mPhysicalStreamMap.get(i).isEmpty()) {
                config.setPhysicalCameraId(mPhysicalStreamMap.get(i));
            }
            if (mStreamUseCaseMap.get(i) != null) {
                config.setStreamUseCase(mStreamUseCaseMap.get(i));
            }
            if (jsonOutputSpecs != null) {
                if (i < jsonOutputSpecs.length()) {
                    JSONObject surfaceObj = jsonOutputSpecs.getJSONObject(i);
                    int colorSpaceInt = surfaceObj.optInt(
                            "colorSpace", ColorSpaceProfiles.UNSPECIFIED);
                    if (colorSpaceInt != ColorSpaceProfiles.UNSPECIFIED) {
                        config.setColorSpace(ColorSpace.Named.values()[colorSpaceInt]);
                    }
                }
            }
            boolean hlg10Compatible =
                    isHlg10Compatible(mOutputImageReaders[i].getImageFormat());
            if (is10bitOutputPresent && hlg10Compatible) {
                // HLG10 is mandatory for all 10-bit output capable devices
                config.setDynamicRangeProfile(DynamicRangeProfiles.HLG10);
            }
            outputConfigs.add(config);
        }
        return outputConfigs;
    }

    /**
     * (Re)creates the output/input ImageReaders for {@code args}; when {@code reuseSession} is
     * set and the args match the previous configuration, keeps the existing readers.
     */
    private void prepareImageReaders(ImageReaderArgs args, boolean reuseSession) {
        if (reuseSession && args.equals(mImageReaderArgs)) {
            Logt.i(TAG, "Reusing image readers.");
            return;
        }
        Logt.i(TAG, String.format(Locale.getDefault(),
                "Current imageReaderArgs: %s, mImageReaderArgs: %s", args, mImageReaderArgs));
        Size[] outputSizes = args.getOutputSizes();
        int[] outputFormats =
args.getOutputFormats(); 2099 Size inputSize = args.getInputSize(); 2100 int inputFormat = args.getInputFormat(); 2101 int maxInputBuffers = args.getMaxInputBuffers(); 2102 boolean has10bitOutput = args.getHas10bitOutput(); 2103 closeImageReaders(); 2104 mOutputImageReaders = new ImageReader[outputSizes.length]; 2105 for (int i = 0; i < outputSizes.length; i++) { 2106 // Check if the output image reader can be shared with the input image reader. 2107 if (outputSizes[i].equals(inputSize) && outputFormats[i] == inputFormat) { 2108 mOutputImageReaders[i] = ImageReader.newInstance(outputSizes[i].getWidth(), 2109 outputSizes[i].getHeight(), outputFormats[i], 2110 MAX_CONCURRENT_READER_BUFFERS + maxInputBuffers, 2111 getReaderUsage(outputFormats[i], has10bitOutput, inputFormat)); 2112 mInputImageReader = mOutputImageReaders[i]; 2113 } else { 2114 mOutputImageReaders[i] = ImageReader.newInstance(outputSizes[i].getWidth(), 2115 outputSizes[i].getHeight(), outputFormats[i], 2116 MAX_CONCURRENT_READER_BUFFERS, getReaderUsage(outputFormats[i], 2117 has10bitOutput, inputFormat)); 2118 } 2119 } 2120 2121 if (inputSize != null && mInputImageReader == null) { 2122 mInputImageReader = ImageReader.newInstance(inputSize.getWidth(), inputSize.getHeight(), 2123 inputFormat, maxInputBuffers, 2124 getReaderUsage(inputFormat, has10bitOutput, inputFormat)); 2125 } 2126 mImageReaderArgs = ImageReaderArgs.valueOf(outputSizes, outputFormats, inputSize, 2127 inputFormat, maxInputBuffers, has10bitOutput); 2128 } 2129 2130 private void closeImageReaders() { 2131 Logt.i(TAG, "Closing image readers"); 2132 if (mOutputImageReaders != null) { 2133 for (int i = 0; i < mOutputImageReaders.length; i++) { 2134 if (mOutputImageReaders[i] != null) { 2135 mOutputImageReaders[i].close(); 2136 mOutputImageReaders[i] = null; 2137 } 2138 } 2139 } 2140 mOutputImageReaders = null; 2141 if (mInputImageReader != null) { 2142 mInputImageReader.close(); 2143 mInputImageReader = null; 2144 } 2145 if 
(mThreeAOutputImageReader != null) { 2146 mThreeAOutputImageReader.close(); 2147 mThreeAOutputImageReader = null; 2148 } 2149 mImageReaderArgs = ImageReaderArgs.EMPTY; 2150 } 2151 2152 private void do3A(JSONObject params) throws ItsException { 2153 ThreeAResultListener threeAListener = new ThreeAResultListener(); 2154 boolean reuseSession = params.optBoolean("reuseSession", false); 2155 boolean firstSurfaceFor3A = params.optBoolean("firstSurfaceFor3A", false); 2156 List<OutputConfiguration> captureOutputConfigurations = new ArrayList<>(); 2157 try { 2158 // Start a 3A action, and wait for it to converge. 2159 // Get the converged values for each "A", and package into JSON result for caller. 2160 2161 // Configure streams on physical sub-camera if PHYSICAL_ID_KEY is specified. 2162 String physicalId = null; 2163 CameraCharacteristics c = mCameraCharacteristics; 2164 if (params.has(PHYSICAL_ID_KEY)) { 2165 physicalId = params.getString(PHYSICAL_ID_KEY); 2166 c = mPhysicalCameraChars.get(physicalId); 2167 } 2168 2169 // Prepare capture image readers here, and skip when doing the actual capture 2170 if (reuseSession) { 2171 Logt.i(TAG, "Preparing capture image readers in 3A"); 2172 JSONArray jsonCaptureOutputSpecs = ItsUtils.getOutputSpecs(params); 2173 List<CaptureRequest.Builder> backgroundRequests = 2174 ItsSerializer.deserializeRequestList(mCamera, params, "repeatRequests"); 2175 boolean backgroundRequest = backgroundRequests.size() > 0; 2176 boolean has10bitOutput = prepareImageReadersWithOutputSpecs(jsonCaptureOutputSpecs, 2177 /*inputSize*/null, /*inputFormat*/0, /*maxInputBuffers*/0, 2178 backgroundRequest, reuseSession); 2179 captureOutputConfigurations = 2180 getCaptureOutputConfigurations(jsonCaptureOutputSpecs, has10bitOutput); 2181 } 2182 2183 // Configure output format and size for 3A session, and prepare ImageReader. 
2184 if (mThreeAOutputImageReader == null) { 2185 if (firstSurfaceFor3A && reuseSession) { 2186 mThreeAOutputImageReader = mOutputImageReaders[0]; 2187 } else { 2188 Logt.i(TAG, "Setting up 3A image reader"); 2189 int outputFormat = ImageFormat.YUV_420_888; 2190 Size size = ItsUtils.getYuvOutputSizes(c)[0]; 2191 mThreeAOutputImageReader = ImageReader.newInstance( 2192 size.getWidth(), size.getHeight(), outputFormat, 2193 MAX_CONCURRENT_READER_BUFFERS, 2194 getReaderUsage(outputFormat, /*has10bitOutput=*/false, 2195 /*inputFormat*/-1)); 2196 } 2197 } 2198 2199 // Add all necessary output configurations for the capture session 2200 List<OutputConfiguration> sessionOutputConfigs = new ArrayList<>(); 2201 for (OutputConfiguration config : captureOutputConfigurations) { 2202 sessionOutputConfigs.add(config); 2203 } 2204 if (!firstSurfaceFor3A) { 2205 OutputConfiguration threeAConfig = 2206 new OutputConfiguration(mThreeAOutputImageReader.getSurface()); 2207 if (physicalId != null) { 2208 threeAConfig.setPhysicalCameraId(physicalId); 2209 } 2210 sessionOutputConfigs.add(threeAConfig); 2211 } 2212 2213 if (mSession != null && reuseSession && 2214 mCaptureOutputConfigs.equals(captureOutputConfigurations)) { 2215 Logt.i(TAG, "Reusing camera capture session in 3A."); 2216 } else { 2217 Logt.i(TAG, "Need to create new capture session in 3A"); 2218 if (mSession != null) { 2219 mSession.close(); 2220 } 2221 mSessionListener = new BlockingSessionCallback(); 2222 mCamera.createCaptureSessionByOutputConfigurations( 2223 sessionOutputConfigs, mSessionListener, mCameraHandler); 2224 mSession = mSessionListener.waitAndGetSession(TIMEOUT_IDLE_MS); 2225 mSessionListener.getStateWaiter().waitForState( 2226 BlockingSessionCallback.SESSION_READY, TIMEOUT_SESSION_READY); 2227 Logt.i(TAG, "New capture session created."); 2228 } 2229 mCaptureOutputConfigs = new ArrayList<OutputConfiguration>(captureOutputConfigurations); 2230 2231 // Add a listener that just recycles buffers; they aren't 
saved anywhere. 2232 ImageReader.OnImageAvailableListener readerListener = 2233 createAvailableListenerDropper(); 2234 mThreeAOutputImageReader.setOnImageAvailableListener(readerListener, mSaveHandlers[0]); 2235 2236 // Get the user-specified regions for AE, AWB, AF. 2237 // Note that the user specifies normalized [x,y,w,h], which is converted below 2238 // to an [x0,y0,x1,y1] region in sensor coords. The capture request region 2239 // also has a fifth "weight" element: [x0,y0,x1,y1,w]. 2240 // Use logical camera's active array size for 3A regions. 2241 Rect activeArray = mCameraCharacteristics.get( 2242 CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); 2243 int aaWidth = activeArray.right - activeArray.left; 2244 int aaHeight = activeArray.bottom - activeArray.top; 2245 MeteringRectangle[] regionAE = new MeteringRectangle[]{ 2246 new MeteringRectangle(0,0,aaWidth,aaHeight,1)}; 2247 MeteringRectangle[] regionAF = new MeteringRectangle[]{ 2248 new MeteringRectangle(0,0,aaWidth,aaHeight,1)}; 2249 MeteringRectangle[] regionAWB = new MeteringRectangle[]{ 2250 new MeteringRectangle(0,0,aaWidth,aaHeight,1)}; 2251 if (params.has(REGION_KEY)) { 2252 JSONObject regions = params.getJSONObject(REGION_KEY); 2253 if (regions.has(REGION_AE_KEY)) { 2254 regionAE = ItsUtils.getJsonWeightedRectsFromArray( 2255 regions.getJSONArray(REGION_AE_KEY), true, aaWidth, aaHeight); 2256 } 2257 if (regions.has(REGION_AF_KEY)) { 2258 regionAF = ItsUtils.getJsonWeightedRectsFromArray( 2259 regions.getJSONArray(REGION_AF_KEY), true, aaWidth, aaHeight); 2260 } 2261 if (regions.has(REGION_AWB_KEY)) { 2262 regionAWB = ItsUtils.getJsonWeightedRectsFromArray( 2263 regions.getJSONArray(REGION_AWB_KEY), true, aaWidth, aaHeight); 2264 } 2265 } 2266 2267 // An EV compensation can be specified as part of AE convergence. 
2268 int evComp = params.optInt(EVCOMP_KEY, 0); 2269 if (evComp != 0) { 2270 Logt.i(TAG, String.format("Running 3A with AE exposure compensation value: %d", evComp)); 2271 } 2272 2273 int flashMode = params.optInt(FLASH_MODE_KEY, CaptureRequest.FLASH_MODE_OFF); 2274 if (flashMode != CaptureRequest.FLASH_MODE_OFF) { 2275 Logt.i(TAG, String.format("Running 3A with FLASH_MODE: %d", flashMode)); 2276 } 2277 2278 // Auto flash can be specified as part of AE convergence. 2279 boolean autoFlash = params.optBoolean(AUTO_FLASH_KEY, false); 2280 if (autoFlash == true) { 2281 Logt.i(TAG, String.format("Running with auto flash mode.")); 2282 } 2283 2284 double zoomRatio = params.optDouble(ZOOM_RATIO_KEY); 2285 if (!Double.isNaN(zoomRatio)) { 2286 Logt.i(TAG, String.format("Running 3A with zoom ratio: %f", zoomRatio)); 2287 } 2288 2289 // By default, AE and AF both get triggered, but the user can optionally override this. 2290 // Also, AF won't get triggered if the lens is fixed-focus. 2291 if (params.has(TRIGGER_KEY)) { 2292 JSONObject triggers = params.getJSONObject(TRIGGER_KEY); 2293 if (triggers.has(TRIGGER_AE_KEY)) { 2294 mDoAE = triggers.getBoolean(TRIGGER_AE_KEY); 2295 } 2296 if (triggers.has(TRIGGER_AF_KEY)) { 2297 mDoAF = triggers.getBoolean(TRIGGER_AF_KEY); 2298 } 2299 } 2300 2301 boolean isFixedFocusLens = ItsUtils.isFixedFocusLens(c); 2302 if (mDoAF && isFixedFocusLens) { 2303 // Send a fake result back for the code that is waiting for this message to see 2304 // that AF has converged. 2305 Logt.i(TAG, "Ignoring request for AF on fixed-focus camera"); 2306 mSocketRunnableObj.sendResponse("afResult", "0.0"); 2307 mDoAF = false; 2308 } 2309 2310 mInterlock3A.open(); 2311 synchronized(m3AStateLock) { 2312 // If AE or AWB lock is specified, then the 3A will converge first and then lock these 2313 // values, waiting until the HAL has reported that the lock was successful. 
2314 mNeedsLockedAE = params.optBoolean(LOCK_AE_KEY, false); 2315 mNeedsLockedAWB = params.optBoolean(LOCK_AWB_KEY, false); 2316 mConvergedAE = false; 2317 mConvergedAWB = false; 2318 mConvergedAF = false; 2319 mLockedAE = false; 2320 mLockedAWB = false; 2321 } 2322 long tstart = System.currentTimeMillis(); 2323 boolean triggeredAE = false; 2324 boolean triggeredAF = false; 2325 2326 Logt.i(TAG, String.format("Initiating 3A: AE:%d, AF:%d, AWB:1, AELOCK:%d, AWBLOCK:%d", 2327 mDoAE?1:0, mDoAF?1:0, mNeedsLockedAE?1:0, mNeedsLockedAWB?1:0)); 2328 2329 // Keep issuing capture requests until 3A has converged. 2330 while (true) { 2331 2332 // Block until can take the next 3A frame. Only want one outstanding frame 2333 // at a time, to simplify the logic here. 2334 if (!mInterlock3A.block(TIMEOUT_3A * 1000) || 2335 System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) { 2336 throw new ItsException( 2337 "3A failed to converge after " + TIMEOUT_3A + " seconds.\n" + 2338 "AE converge state: " + mConvergedAE + ", \n" + 2339 "AF convergence state: " + mConvergedAF + ", \n" + 2340 "AWB convergence state: " + mConvergedAWB + "."); 2341 } 2342 mInterlock3A.close(); 2343 2344 synchronized(m3AStateLock) { 2345 // If not converged yet, issue another capture request. 2346 if ((mDoAE && (!triggeredAE || !mConvergedAE)) 2347 || !mConvergedAWB 2348 || (mDoAF && (!triggeredAF || !mConvergedAF)) 2349 || (mDoAE && mNeedsLockedAE && !mLockedAE) 2350 || (mNeedsLockedAWB && !mLockedAWB)) { 2351 2352 // Baseline capture request for 3A. 
2353 CaptureRequest.Builder req = mCamera.createCaptureRequest( 2354 CameraDevice.TEMPLATE_PREVIEW); 2355 req.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF); 2356 req.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO); 2357 req.set(CaptureRequest.CONTROL_CAPTURE_INTENT, 2358 CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW); 2359 req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0); 2360 req.set(CaptureRequest.CONTROL_AE_LOCK, false); 2361 req.set(CaptureRequest.CONTROL_AE_REGIONS, regionAE); 2362 req.set(CaptureRequest.CONTROL_AF_MODE, 2363 CaptureRequest.CONTROL_AF_MODE_AUTO); 2364 req.set(CaptureRequest.CONTROL_AF_REGIONS, regionAF); 2365 req.set(CaptureRequest.CONTROL_AWB_MODE, 2366 CaptureRequest.CONTROL_AWB_MODE_AUTO); 2367 req.set(CaptureRequest.CONTROL_AWB_LOCK, false); 2368 req.set(CaptureRequest.CONTROL_AWB_REGIONS, regionAWB); 2369 // ITS only turns OIS on when it's explicitly requested 2370 req.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, 2371 CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF); 2372 2373 if (evComp != 0) { 2374 req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, evComp); 2375 } 2376 2377 if (flashMode != CaptureRequest.FLASH_MODE_OFF) { 2378 req.set(CaptureRequest.FLASH_MODE, flashMode); 2379 } 2380 2381 if (autoFlash == false) { 2382 req.set(CaptureRequest.CONTROL_AE_MODE, 2383 CaptureRequest.CONTROL_AE_MODE_ON); 2384 } else { 2385 req.set(CaptureRequest.CONTROL_AE_MODE, 2386 CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); 2387 } 2388 2389 if (!Double.isNaN(zoomRatio)) { 2390 req.set(CaptureRequest.CONTROL_ZOOM_RATIO, (float) zoomRatio); 2391 } 2392 2393 if (mConvergedAE && mNeedsLockedAE) { 2394 req.set(CaptureRequest.CONTROL_AE_LOCK, true); 2395 } 2396 if (mConvergedAWB && mNeedsLockedAWB) { 2397 req.set(CaptureRequest.CONTROL_AWB_LOCK, true); 2398 } 2399 2400 boolean triggering = false; 2401 // Trigger AE first. 
                    // Trigger AE precapture first; any in-flight AF sequence is
                    // cancelled so AF can be re-triggered cleanly once AE converges.
                    if (mDoAE && !triggeredAE) {
                        Logt.i(TAG, "Triggering AE");
                        req.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                                CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
                        if (mDoAF) {
                            req.set(CaptureRequest.CONTROL_AF_TRIGGER,
                                    CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
                        }
                        triggeredAE = true;
                        triggering = true;
                    }

                    // After AE has converged, trigger AF.
                    if (mDoAF && !triggeredAF && (!mDoAE || (triggeredAE && mConvergedAE))) {
                        Logt.i(TAG, "Triggering AF");
                        req.set(CaptureRequest.CONTROL_AF_TRIGGER,
                                CaptureRequest.CONTROL_AF_TRIGGER_START);
                        triggeredAF = true;
                        triggering = true;
                    }

                    req.addTarget(mThreeAOutputImageReader.getSurface());

                    if (triggering) {
                        // Send single request for AE/AF trigger
                        mSession.capture(req.build(),
                                threeAListener, mResultHandler);
                    } else {
                        // Use repeating request for non-trigger requests
                        mSession.setRepeatingRequest(req.build(),
                                threeAListener, mResultHandler);
                    }
                } else {
                    // All requested 3A routines have converged (and locked, if
                    // requested); report success to the host and leave the loop.
                    mSocketRunnableObj.sendResponse("3aConverged", "");
                    Logt.i(TAG, "3A converged");
                    break;
                }
            }
        }
        mSession.stopRepeating();
        // Wait for the session to quiesce so subsequent captures start from a
        // known-ready state.
        mSessionListener.getStateWaiter().waitForState(
                BlockingSessionCallback.SESSION_READY, TIMEOUT_SESSION_READY);
        Logt.i(TAG, "Session is ready again after doing 3A.");
    } catch (android.hardware.camera2.CameraAccessException e) {
        throw new ItsException("Access error: ", e);
    } catch (org.json.JSONException e) {
        throw new ItsException("JSON error: ", e);
    } finally {
        // Always notify the host that 3A finished, even on failure, so the
        // remote test script does not hang waiting for a response.
        mSocketRunnableObj.sendResponse("3aDone", "");
        // stop listener from updating 3A states
        threeAListener.stop();
        if (mSession != null && !reuseSession) {
            closeImageReaders();
            mSession.close();
            mSession = null;
        }
    }
    }

    /**
     * Starts the device vibrator with the on/off pattern supplied in {@code params}.
     *
     * <p>The vibration is tagged with {@link AudioAttributes#USAGE_ALARM} so that the
     * camera audio restriction API can be exercised against it.
     *
     * @param params JSON object containing the {@code VIB_PATTERN_KEY} array of
     *               durations in milliseconds.
     * @throws ItsException if no vibrator is available or the JSON is malformed.
     */
    private void doVibrate(JSONObject params) throws ItsException {
        try {
            if (mVibrator == null) {
                throw new ItsException("Unable to start vibrator");
            }
            JSONArray patternArray = params.getJSONArray(VIB_PATTERN_KEY);
            int len = patternArray.length();
            long pattern[] = new long[len];
            for (int i = 0; i < len; i++) {
                pattern[i] = patternArray.getLong(i);
            }
            Logt.i(TAG, String.format("Starting vibrator, pattern length %d",len));

            // Mark the vibrator as alarm to test the audio restriction API
            // TODO: consider making this configurable
            AudioAttributes audioAttributes = new AudioAttributes.Builder()
                    .setUsage(AudioAttributes.USAGE_ALARM).build();
            // repeat index -1: play the pattern once, no looping.
            mVibrator.vibrate(pattern, -1, audioAttributes);
            mSocketRunnableObj.sendResponse("vibrationStarted", "");
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error: ", e);
        }
    }

    /**
     * Applies the camera audio restriction mode requested in {@code params} to the
     * currently open camera device.
     *
     * @param params JSON object containing {@code AUDIO_RESTRICTION_MODE_KEY}.
     * @throws ItsException if the camera is closed, the JSON is malformed, or the
     *                      camera cannot be accessed.
     */
    private void doSetAudioRestriction(JSONObject params) throws ItsException {
        try {
            if (mCamera == null) {
                throw new ItsException("Camera is closed");
            }
            int mode = params.getInt(AUDIO_RESTRICTION_MODE_KEY);
            mCamera.setCameraAudioRestriction(mode);
            Logt.i(TAG, String.format("Set audio restriction mode to %d", mode));

            mSocketRunnableObj.sendResponse("audioRestrictionSet", "");
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error: ", e);
        } catch (android.hardware.camera2.CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        }
    }

    /**
     * Parse jsonOutputSpecs to get output surface sizes and formats. Create input and output
     * image readers for the parsed output surface sizes, output formats, and the given input
     * size and format.
2506 */ 2507 private boolean prepareImageReadersWithOutputSpecs(JSONArray jsonOutputSpecs, 2508 Size inputSize, int inputFormat, int maxInputBuffers, 2509 boolean backgroundRequest, boolean reuseSession) 2510 throws ItsException { 2511 final int TEN_BIT_CAPABILITY = 2512 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT; 2513 Size outputSizes[]; 2514 int outputFormats[]; 2515 int numSurfaces = 0; 2516 mPhysicalStreamMap.clear(); 2517 mStreamUseCaseMap.clear(); 2518 2519 boolean is10bitOutputPresent = false; 2520 if (jsonOutputSpecs != null) { 2521 try { 2522 numSurfaces = jsonOutputSpecs.length(); 2523 if (backgroundRequest) { 2524 numSurfaces += 1; 2525 } 2526 if (numSurfaces > MAX_NUM_OUTPUT_SURFACES) { 2527 throw new ItsException("Too many output surfaces"); 2528 } 2529 2530 outputSizes = new Size[numSurfaces]; 2531 outputFormats = new int[numSurfaces]; 2532 for (int i = 0; i < numSurfaces; i++) { 2533 // Append optional background stream at the end 2534 if (backgroundRequest && i == numSurfaces - 1) { 2535 outputFormats[i] = ImageFormat.YUV_420_888; 2536 outputSizes[i] = new Size(640, 480); 2537 continue; 2538 } 2539 // Get the specified surface. 2540 JSONObject surfaceObj = jsonOutputSpecs.getJSONObject(i); 2541 String physicalCameraId = surfaceObj.optString("physicalCamera"); 2542 CameraCharacteristics cameraCharacteristics = mCameraCharacteristics; 2543 mPhysicalStreamMap.put(i, physicalCameraId); 2544 if (!physicalCameraId.isEmpty()) { 2545 cameraCharacteristics = mPhysicalCameraChars.get(physicalCameraId); 2546 } 2547 2548 String sformat = surfaceObj.optString("format"); 2549 Size sizes[]; 2550 if ("yuv".equals(sformat) || "".equals(sformat)) { 2551 // Default to YUV if no format is specified. 
2552 outputFormats[i] = ImageFormat.YUV_420_888; 2553 sizes = ItsUtils.getYuvOutputSizes(cameraCharacteristics); 2554 } else if ("jpg".equals(sformat) || "jpeg".equals(sformat)) { 2555 outputFormats[i] = ImageFormat.JPEG; 2556 sizes = ItsUtils.getJpegOutputSizes(cameraCharacteristics); 2557 } else if (HEIC_ULTRAHDR_FMT.equals(sformat)) { 2558 outputFormats[i] = ImageFormat.HEIC_ULTRAHDR; 2559 sizes = ItsUtils.getHeicUltraHdrOutputSizes(cameraCharacteristics); 2560 int[] actualCapabilities = cameraCharacteristics.get( 2561 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 2562 is10bitOutputPresent = Arrays.asList(CameraTestUtils.toObject( 2563 actualCapabilities)).contains(TEN_BIT_CAPABILITY); 2564 } else if (JPEG_R_FMT.equals(sformat)) { 2565 outputFormats[i] = ImageFormat.JPEG_R; 2566 sizes = ItsUtils.getJpegOutputSizes(cameraCharacteristics); 2567 int[] actualCapabilities = cameraCharacteristics.get( 2568 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 2569 is10bitOutputPresent = Arrays.asList(CameraTestUtils.toObject( 2570 actualCapabilities)).contains(TEN_BIT_CAPABILITY); 2571 } else if ("priv".equals(sformat)) { 2572 outputFormats[i] = ImageFormat.PRIVATE; 2573 sizes = ItsUtils.getPrivOutputSizes(cameraCharacteristics); 2574 is10bitOutputPresent = surfaceObj.optBoolean("hlg10"); 2575 } else if ("raw".equals(sformat)) { 2576 outputFormats[i] = ImageFormat.RAW_SENSOR; 2577 sizes = ItsUtils.getRaw16OutputSizes(cameraCharacteristics); 2578 } else if ("rawQuadBayer".equals(sformat)) { 2579 outputFormats[i] = ImageFormat.RAW_SENSOR; 2580 sizes = ItsUtils.getRaw16MaxResulolutionOutputSizes(cameraCharacteristics); 2581 mCaptureRawIsQuadBayer = true; 2582 } else if ("rawStats".equals(sformat)) { 2583 outputFormats[i] = ImageFormat.RAW_SENSOR; 2584 sizes = ItsUtils.getRaw16OutputSizes(cameraCharacteristics); 2585 mCaptureRawIsStats = true; 2586 mCaptureStatsGridWidth = surfaceObj.optInt("gridWidth"); 2587 mCaptureStatsGridHeight = 
surfaceObj.optInt("gridHeight"); 2588 } else if ("rawQuadBayerStats".equals(sformat)) { 2589 outputFormats[i] = ImageFormat.RAW_SENSOR; 2590 sizes = ItsUtils.getRaw16MaxResulolutionOutputSizes(cameraCharacteristics); 2591 mCaptureRawIsQuadBayerStats = true; 2592 mCaptureStatsGridWidth = surfaceObj.optInt("gridWidth"); 2593 mCaptureStatsGridHeight = surfaceObj.optInt("gridHeight"); 2594 } 2595 else if ("raw10".equals(sformat)) { 2596 outputFormats[i] = ImageFormat.RAW10; 2597 sizes = ItsUtils.getRaw10OutputSizes(cameraCharacteristics); 2598 } else if ("raw10QuadBayer".equals(sformat)) { 2599 outputFormats[i] = ImageFormat.RAW10; 2600 sizes = ItsUtils.getRaw10MaxResulolutionOutputSizes(cameraCharacteristics); 2601 mCaptureRawIsQuadBayer = true; 2602 } else if ("raw10Stats".equals(sformat)) { 2603 outputFormats[i] = ImageFormat.RAW10; 2604 sizes = ItsUtils.getRaw10OutputSizes(cameraCharacteristics); 2605 mCaptureRawIsStats = true; 2606 mCaptureStatsGridWidth = surfaceObj.optInt("gridWidth"); 2607 mCaptureStatsGridHeight = surfaceObj.optInt("gridHeight"); 2608 } else if ("raw10QuadBayerStats".equals(sformat)) { 2609 outputFormats[i] = ImageFormat.RAW10; 2610 sizes = ItsUtils.getRaw10MaxResulolutionOutputSizes(cameraCharacteristics); 2611 mCaptureRawIsQuadBayerStats = true; 2612 mCaptureStatsGridWidth = surfaceObj.optInt("gridWidth"); 2613 mCaptureStatsGridHeight = surfaceObj.optInt("gridHeight"); 2614 } else if ("raw12".equals(sformat)) { 2615 outputFormats[i] = ImageFormat.RAW12; 2616 sizes = ItsUtils.getRaw12OutputSizes(cameraCharacteristics); 2617 } else if ("dng".equals(sformat)) { 2618 outputFormats[i] = ImageFormat.RAW_SENSOR; 2619 sizes = ItsUtils.getRaw16OutputSizes(cameraCharacteristics); 2620 mCaptureRawIsDng = true; 2621 } else if ("y8".equals(sformat)) { 2622 outputFormats[i] = ImageFormat.Y8; 2623 sizes = ItsUtils.getY8OutputSizes(cameraCharacteristics); 2624 } else { 2625 throw new ItsException("Unsupported format: " + sformat); 2626 } 2627 // If the size 
is omitted, then default to the largest allowed size for the 2628 // format. 2629 int width = surfaceObj.optInt("width"); 2630 int height = surfaceObj.optInt("height"); 2631 if (width <= 0) { 2632 if (sizes == null || sizes.length == 0) { 2633 throw new ItsException(String.format( 2634 "Zero stream configs available for requested format: %s", 2635 sformat)); 2636 } 2637 width = ItsUtils.getMaxSize(sizes).getWidth(); 2638 } 2639 if (height <= 0) { 2640 height = ItsUtils.getMaxSize(sizes).getHeight(); 2641 } 2642 // The stats computation only applies to the active array region. 2643 boolean isMaximumResolution = 2644 mCaptureRawIsQuadBayer || mCaptureRawIsQuadBayerStats; 2645 Rect activeArrayCropRegion = ItsUtils.getActiveArrayCropRegion( 2646 cameraCharacteristics, isMaximumResolution); 2647 int aaw = activeArrayCropRegion.width(); 2648 int aah = activeArrayCropRegion.height(); 2649 if (mCaptureStatsGridWidth <= 0 || mCaptureStatsGridWidth > aaw) { 2650 mCaptureStatsGridWidth = aaw; 2651 } 2652 if (mCaptureStatsGridHeight <= 0 || mCaptureStatsGridHeight > aah) { 2653 mCaptureStatsGridHeight = aah; 2654 } 2655 2656 outputSizes[i] = new Size(width, height); 2657 if (!surfaceObj.isNull("useCase")) { 2658 mStreamUseCaseMap.put(i, surfaceObj.optLong("useCase")); 2659 } 2660 } 2661 } catch (org.json.JSONException e) { 2662 throw new ItsException("JSON error", e); 2663 } 2664 } else { 2665 // No surface(s) specified at all. 2666 // Default: a single output surface which is full-res YUV. 2667 Size maxYuvSize = ItsUtils.getMaxOutputSize( 2668 mCameraCharacteristics, ImageFormat.YUV_420_888); 2669 numSurfaces = backgroundRequest ? 
2 : 1; 2670 2671 outputSizes = new Size[numSurfaces]; 2672 outputFormats = new int[numSurfaces]; 2673 outputSizes[0] = maxYuvSize; 2674 outputFormats[0] = ImageFormat.YUV_420_888; 2675 if (backgroundRequest) { 2676 outputSizes[1] = new Size(640, 480); 2677 outputFormats[1] = ImageFormat.YUV_420_888; 2678 } 2679 } 2680 2681 prepareImageReaders(ImageReaderArgs.valueOf(outputSizes, outputFormats, inputSize, 2682 inputFormat, maxInputBuffers, 2683 is10bitOutputPresent), reuseSession); 2684 2685 return is10bitOutputPresent; 2686 } 2687 2688 /** 2689 * Wait until mCountCallbacksRemaining is 0 or a specified amount of time has elapsed between 2690 * each callback. 2691 */ 2692 private void waitForCallbacks(long timeoutMs) throws ItsException { 2693 synchronized(mCountCallbacksRemaining) { 2694 int currentCount = mCountCallbacksRemaining.get(); 2695 while (currentCount > 0) { 2696 try { 2697 mCountCallbacksRemaining.wait(timeoutMs); 2698 } catch (InterruptedException e) { 2699 throw new ItsException("Waiting for callbacks was interrupted.", e); 2700 } 2701 2702 int newCount = mCountCallbacksRemaining.get(); 2703 if (newCount == currentCount) { 2704 throw new ItsException("No callback received within timeout " + 2705 timeoutMs + "ms"); 2706 } 2707 currentCount = newCount; 2708 } 2709 } 2710 } 2711 2712 private void doGetSupportedVideoQualities(String id) throws ItsException { 2713 int cameraId = Integer.parseInt(id); 2714 StringBuilder profiles = new StringBuilder(); 2715 for (Map.Entry<Integer, String> entry : CAMCORDER_PROFILE_QUALITIES_MAP.entrySet()) { 2716 appendSupportProfile(profiles, entry.getValue(), entry.getKey(), cameraId); 2717 } 2718 mSocketRunnableObj.sendResponse("supportedVideoQualities", profiles.toString()); 2719 } 2720 2721 private void doGetDefaultCameraPkgName() throws ItsException { 2722 PackageManager pkgMgr = getPackageManager(); 2723 Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE); 2724 String pkgName = 
intent.resolveActivity(pkgMgr).getPackageName(); 2725 Log.i(TAG, "Default camera pkg name: " + pkgName); 2726 mSocketRunnableObj.sendResponse("defaultCameraPkg", pkgName); 2727 } 2728 2729 private void doGainMapCheck(JSONObject params) throws ItsException { 2730 String filePath; 2731 try { 2732 filePath = params.getString("filePath"); 2733 } catch(org.json.JSONException e) { 2734 throw new ItsException("JSON error: ", e); 2735 } 2736 Bitmap bitmapImage = BitmapFactory.decodeFile(filePath); 2737 assert(bitmapImage != null); 2738 boolean gainmapPresent = bitmapImage.hasGainmap(); 2739 Log.i(TAG, "Gainmap present? " + gainmapPresent); 2740 mSocketRunnableObj.sendResponse("gainmapPresent", 2741 gainmapPresent ? "true" : "false"); 2742 } 2743 2744 private void doGetSupportedVideoSizesCapped(String id) throws ItsException { 2745 int cameraId = Integer.parseInt(id); 2746 // s1440p which is the max supported stream size in a combination, when preview 2747 // stabilization is on. 2748 Size maxPreviewSize = new Size(1920, 1440); 2749 ArrayList<Size> outputSizes = new ArrayList<>(); 2750 for (Map.Entry<Integer, String> entry : CAMCORDER_PROFILE_QUALITIES_MAP.entrySet()) { 2751 if (CamcorderProfile.hasProfile(cameraId, entry.getKey())) { 2752 CamcorderProfile camcorderProfile = getCamcorderProfile(cameraId, entry.getKey()); 2753 assert(camcorderProfile != null); 2754 Size videoSize = new Size(camcorderProfile.videoFrameWidth, 2755 camcorderProfile.videoFrameHeight); 2756 outputSizes.add(videoSize); 2757 } 2758 } 2759 Log.i(TAG, "Supported video sizes: " + outputSizes.toString()); 2760 String response = outputSizes.stream() 2761 .distinct() 2762 .filter(s -> s.getWidth() * s.getHeight() 2763 <= maxPreviewSize.getWidth() * maxPreviewSize.getHeight()) 2764 .sorted(Comparator.comparingInt(s -> s.getWidth() * s.getHeight())) 2765 .map(Size::toString) 2766 .collect(Collectors.joining(";")); 2767 mSocketRunnableObj.sendResponse("supportedVideoSizes", response); 2768 } 2769 2770 
private void appendSupportProfile(StringBuilder profiles, String name, int profile, 2771 int cameraId) { 2772 if (CamcorderProfile.hasProfile(cameraId, profile)) { 2773 profiles.append(name).append(':').append(profile).append(';'); 2774 } 2775 } 2776 2777 private boolean isVideoStabilizationModeSupported(int mode) { 2778 int[] videoStabilizationModes = mCameraCharacteristics.get( 2779 CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES); 2780 List<Integer> arrList = Arrays.asList(CameraTestUtils.toObject(videoStabilizationModes)); 2781 assert(videoStabilizationModes != null); 2782 assert(arrList.contains(CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF)); 2783 Log.i(TAG, "videoStabilizationModes:" + Arrays.toString(videoStabilizationModes)); 2784 return arrList.contains(mode); 2785 } 2786 2787 private void doGetSupportedPreviewSizes(boolean filterRecordable) 2788 throws ItsException { 2789 StreamConfigurationMap configMap = mCameraCharacteristics.get( 2790 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); 2791 if (!StreamConfigurationMap.isOutputSupportedFor(SurfaceHolder.class)) { 2792 mSocketRunnableObj.sendResponse("supportedPreviewSizes", ""); 2793 return; 2794 } 2795 2796 Size[] outputSizes = configMap.getOutputSizes(ImageFormat.YUV_420_888); 2797 if (outputSizes == null) { 2798 mSocketRunnableObj.sendResponse("supportedPreviewSizes", ""); 2799 return; 2800 } 2801 2802 Stream<Size> previewSizesStream = Arrays.stream(outputSizes).distinct(); 2803 if (filterRecordable) { 2804 Logt.i(TAG, "Filter preview sizes if supported by MediaRecorder"); 2805 previewSizesStream = previewSizesStream.filter( 2806 size -> isSizeSupportedByMediaRecorder(size)); 2807 } 2808 String response = previewSizesStream 2809 .sorted(Comparator.comparingInt(s -> s.getWidth() * s.getHeight())) 2810 .map(Size::toString) 2811 .collect(Collectors.joining(";")); 2812 2813 mSocketRunnableObj.sendResponse("supportedPreviewSizes", response); 2814 } 2815 2816 private boolean 
isSizeSupportedByMediaRecorder(Size previewSize) { 2817 Surface recordSurface = MediaCodec.createPersistentInputSurface(); 2818 2819 MediaRecorder mediaRecorder = new MediaRecorder(this); 2820 mediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT); 2821 mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE); 2822 mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.DEFAULT); 2823 2824 mediaRecorder.setVideoSize(previewSize.getWidth(), previewSize.getHeight()); 2825 mediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.DEFAULT); 2826 mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.DEFAULT); 2827 mediaRecorder.setInputSurface(recordSurface); 2828 mediaRecorder.setVideoFrameRate(30); 2829 String outputFile = getExternalCacheDir().getAbsolutePath() + "/its_test.mp4"; 2830 mediaRecorder.setOutputFile(outputFile); 2831 2832 try { 2833 mediaRecorder.prepare(); 2834 mediaRecorder.release(); 2835 recordSurface.release(); 2836 return true; 2837 } catch (IOException e) { 2838 Logt.i(TAG, "Error preparing MediaRecorder with " + previewSize + ". 
error = " + e); 2839 } 2840 return false; 2841 } 2842 2843 private void doGetQueryableStreamCombinations() throws ItsException { 2844 StaticMetadata staticMetadata = new StaticMetadata(mCameraCharacteristics); 2845 MaxStreamSizes maxStreamSizes = new MaxStreamSizes(staticMetadata, 2846 mCamera.getId(), (Context) this, /*matchSize*/true); 2847 StringBuilder responseBuilder = new StringBuilder(); 2848 int[][] queryableCombinations = maxStreamSizes.getQueryableCombinations(); 2849 for (int i = 0; i < queryableCombinations.length; i++) { 2850 String oneCombination = String.valueOf(queryableCombinations[i][0]) + "+"; 2851 for (int j = 1; j < queryableCombinations[i].length; j += 2) { 2852 String format = sFormatMap.get(queryableCombinations[i][j]); 2853 int sizeIndex = queryableCombinations[i][j + 1]; 2854 Size size = maxStreamSizes.getOutputSizeForFormat( 2855 queryableCombinations[i][j], sizeIndex); 2856 String oneStream = format + ":" + size.toString(); 2857 if (j > 1) { 2858 oneCombination += "+"; 2859 } 2860 oneCombination += oneStream; 2861 } 2862 2863 if (i > 0) { 2864 responseBuilder.append(";"); 2865 } 2866 responseBuilder.append(oneCombination); 2867 } 2868 2869 Log.i(TAG, "queryableStreamCombinations response is " + responseBuilder.toString()); 2870 mSocketRunnableObj.sendResponse("queryableStreamCombinations", responseBuilder.toString()); 2871 } 2872 2873 private void doGetSupportedExtensions(String id) throws ItsException { 2874 try { 2875 CameraExtensionCharacteristics chars = 2876 mCameraManager.getCameraExtensionCharacteristics(id); 2877 List<Integer> extensionsList = chars.getSupportedExtensions(); 2878 mSocketRunnableObj.sendResponse("supportedExtensions", extensionsList.toString()); 2879 } catch (CameraAccessException e) { 2880 throw new ItsException("Failed to get supported extensions list", e); 2881 } 2882 } 2883 2884 private void doGetSupportedExtensionSizes( 2885 String id, int extension, int format) throws ItsException { 2886 try { 2887 
CameraExtensionCharacteristics chars = 2888 mCameraManager.getCameraExtensionCharacteristics(id); 2889 List<Size> extensionSizes = chars.getExtensionSupportedSizes(extension, format); 2890 String response = extensionSizes.stream() 2891 .distinct() 2892 .sorted(Comparator.comparingInt(s -> s.getWidth() * s.getHeight())) 2893 .map(Size::toString) 2894 .collect(Collectors.joining(";")); 2895 mSocketRunnableObj.sendResponse("supportedExtensionSizes", response); 2896 } catch (CameraAccessException e) { 2897 throw new ItsException("Failed to get supported extensions sizes list", e); 2898 } 2899 } 2900 2901 private void doGetSupportedExtensionPreviewSizes(String id, int extension) 2902 throws ItsException { 2903 try { 2904 CameraExtensionCharacteristics chars = 2905 mCameraManager.getCameraExtensionCharacteristics(id); 2906 List<Size> extensionSizes = chars.getExtensionSupportedSizes(extension, 2907 SurfaceTexture.class); 2908 String response = extensionSizes.stream() 2909 .distinct() 2910 .sorted(Comparator.comparingInt(s -> s.getWidth() * s.getHeight())) 2911 .map(Size::toString) 2912 .collect(Collectors.joining(";")); 2913 mSocketRunnableObj.sendResponse("supportedExtensionPreviewSizes", response); 2914 } catch (CameraAccessException e) { 2915 throw new ItsException("Failed to get supported extensions sizes list", e); 2916 } 2917 } 2918 2919 private void doBasicRecording(String cameraId, int profileId, String quality, 2920 int recordingDuration, int videoStabilizationMode, 2921 boolean hlg10Enabled, double zoomRatio, int aeTargetFpsMin, int aeTargetFpsMax, 2922 int aeAntibandingMode, int faceDetectMode) throws ItsException { 2923 RecordingResultListener recordingResultListener = new RecordingResultListener(); 2924 2925 if (!hlg10Enabled) { 2926 doBasicRecording( 2927 cameraId, profileId, quality, recordingDuration, videoStabilizationMode, 2928 zoomRatio, aeTargetFpsMin, aeTargetFpsMax, aeAntibandingMode, faceDetectMode); 2929 return; 2930 } 2931 2932 int cameraDeviceId 
= Integer.parseInt(cameraId); 2933 CamcorderProfile camcorderProfile = getCamcorderProfile(cameraDeviceId, profileId); 2934 assert (camcorderProfile != null); 2935 boolean supportsVideoStabilizationMode = isVideoStabilizationModeSupported( 2936 videoStabilizationMode); 2937 if (!supportsVideoStabilizationMode) { 2938 throw new ItsException("Device does not support video stabilization mode: " + 2939 videoStabilizationMode); 2940 } 2941 Size videoSize = new Size(camcorderProfile.videoFrameWidth, 2942 camcorderProfile.videoFrameHeight); 2943 int fileFormat = camcorderProfile.fileFormat; 2944 String outputFilePath = getOutputMediaFile(cameraDeviceId, videoSize, quality, fileFormat, 2945 /* hlg10Enabled= */ true, 2946 videoStabilizationMode, zoomRatio); 2947 assert (outputFilePath != null); 2948 2949 MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS); 2950 MediaFormat format = ItsUtils.initializeHLG10Format(videoSize, 2951 camcorderProfile.videoBitRate, camcorderProfile.videoFrameRate); 2952 2953 String codecName = list.findEncoderForFormat(format); 2954 assert (codecName != null); 2955 2956 int[] caps = mCameraCharacteristics.get( 2957 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 2958 assert ((caps != null) && IntStream.of(caps).anyMatch(x -> x == 2959 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT)); 2960 2961 DynamicRangeProfiles profiles = mCameraCharacteristics.get( 2962 CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES); 2963 assert ((profiles != null) && 2964 profiles.getSupportedProfiles().contains(DynamicRangeProfiles.HLG10)); 2965 2966 MediaCodec mediaCodec = null; 2967 MediaMuxer muxer = null; 2968 Log.i(TAG, "Video recording outputFilePath:"+ outputFilePath); 2969 try { 2970 muxer = new MediaMuxer(outputFilePath, 2971 MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); 2972 } catch (IOException e) { 2973 throw new ItsException("Error preparing the MediaMuxer."); 2974 } 2975 try { 2976 
mediaCodec = MediaCodec.createByCodecName(codecName); 2977 } catch (IOException e) { 2978 throw new ItsException("Error preparing the MediaCodec."); 2979 } 2980 2981 mediaCodec.configure(format, null, null, 2982 MediaCodec.CONFIGURE_FLAG_ENCODE); 2983 Object condition = new Object(); 2984 mediaCodec.setCallback(new ItsUtils.MediaCodecListener(muxer, condition), mCameraHandler); 2985 2986 mRecordSurface = mediaCodec.createInputSurface(); 2987 assert(mRecordSurface != null); 2988 2989 CameraCaptureSession.StateCallback mockCallback = mock( 2990 CameraCaptureSession.StateCallback.class); 2991 // Configure and create capture session. 2992 try { 2993 configureAndCreateCaptureSession(CameraDevice.TEMPLATE_RECORD, mRecordSurface, 2994 videoStabilizationMode, /*ois=*/ false, DynamicRangeProfiles.HLG10, 2995 mockCallback, zoomRatio, aeTargetFpsMin, aeTargetFpsMax, 2996 recordingResultListener, /*extraConfigs*/null, aeAntibandingMode, 2997 faceDetectMode); 2998 } catch (CameraAccessException e) { 2999 throw new ItsException("Access error: ", e); 3000 } 3001 3002 Log.i(TAG, "Now recording video for quality: " + quality + " profile id: " + 3003 profileId + " cameraId: " + cameraDeviceId + " size: " + videoSize + " in HLG10!"); 3004 mediaCodec.start(); 3005 try { 3006 Thread.sleep(recordingDuration * 1000L); // recordingDuration is in seconds 3007 } catch (InterruptedException e) { 3008 throw new ItsException("Unexpected InterruptedException: ", e); 3009 } 3010 3011 mediaCodec.signalEndOfInputStream(); 3012 mSession.close(); 3013 verify(mockCallback, timeout(ItsUtils.SESSION_CLOSE_TIMEOUT_MS) 3014 .times(1)).onClosed(eq(mSession)); 3015 3016 synchronized (condition) { 3017 try { 3018 condition.wait(ItsUtils.SESSION_CLOSE_TIMEOUT_MS); 3019 } catch (InterruptedException e) { 3020 throw new ItsException("Unexpected InterruptedException: ", e); 3021 } 3022 } 3023 3024 muxer.stop(); 3025 mediaCodec.stop(); 3026 mediaCodec.release(); 3027 muxer.release(); 3028 
mRecordSurface.release(); 3029 mRecordSurface = null; 3030 3031 Log.i(TAG, "10-bit Recording Done for quality: " + quality); 3032 3033 // Send VideoRecordingObject for further processing. 3034 VideoRecordingObject obj = new VideoRecordingObject(outputFilePath, 3035 quality, videoSize, camcorderProfile.videoFrameRate, fileFormat, zoomRatio, 3036 /*perFrameCaptureResults=*/ Collections.emptyList()); 3037 mSocketRunnableObj.sendVideoRecordingObject(obj); 3038 } 3039 3040 private void doBasicRecording(String cameraId, int profileId, String quality, 3041 int recordingDuration, int videoStabilizationMode, double zoomRatio, 3042 int aeTargetFpsMin, int aeTargetFpsMax, int aeAntibandingMode, int faceDetectMode) 3043 throws ItsException { 3044 RecordingResultListener recordingResultListener = new RecordingResultListener(); 3045 int cameraDeviceId = Integer.parseInt(cameraId); 3046 mMediaRecorder = new MediaRecorder(); 3047 CamcorderProfile camcorderProfile = getCamcorderProfile(cameraDeviceId, profileId); 3048 assert(camcorderProfile != null); 3049 boolean supportsVideoStabilizationMode = isVideoStabilizationModeSupported( 3050 videoStabilizationMode); 3051 if (!supportsVideoStabilizationMode) { 3052 throw new ItsException("Device does not support video stabilization mode: " + 3053 videoStabilizationMode); 3054 } 3055 Size videoSize = new Size(camcorderProfile.videoFrameWidth, 3056 camcorderProfile.videoFrameHeight); 3057 int fileFormat = camcorderProfile.fileFormat; 3058 String outputFilePath = getOutputMediaFile(cameraDeviceId, videoSize, quality, 3059 fileFormat, videoStabilizationMode, zoomRatio); 3060 assert(outputFilePath != null); 3061 Log.i(TAG, "Video recording outputFilePath:"+ outputFilePath); 3062 setupMediaRecorderWithProfile(camcorderProfile, outputFilePath); 3063 // Prepare MediaRecorder 3064 try { 3065 mMediaRecorder.prepare(); 3066 } catch (IOException e) { 3067 throw new ItsException("Error preparing the MediaRecorder."); 3068 } 3069 mRecordSurface = 
mMediaRecorder.getSurface(); 3070 // Configure and create capture session. 3071 try { 3072 configureAndCreateCaptureSession(CameraDevice.TEMPLATE_RECORD, mRecordSurface, 3073 videoStabilizationMode, /*ois=*/ false, DynamicRangeProfiles.STANDARD, 3074 /*stateCallback=*/ null, zoomRatio, aeTargetFpsMin, aeTargetFpsMax, 3075 recordingResultListener, /*extraConfigs*/null, aeAntibandingMode, 3076 faceDetectMode); 3077 } catch (android.hardware.camera2.CameraAccessException e) { 3078 throw new ItsException("Access error: ", e); 3079 } 3080 // Start Recording 3081 if (mMediaRecorder != null) { 3082 Log.i(TAG, "Now recording video for quality: " + quality + " profile id: " + 3083 profileId + " cameraId: " + cameraDeviceId + " size: " + videoSize); 3084 mMediaRecorder.start(); 3085 try { 3086 Thread.sleep(recordingDuration * 1000L); // recordingDuration is in seconds 3087 } catch (InterruptedException e) { 3088 throw new ItsException("Unexpected InterruptedException: ", e); 3089 } 3090 // Stop MediaRecorder 3091 mMediaRecorder.stop(); 3092 mSession.close(); 3093 mMediaRecorder.reset(); 3094 mMediaRecorder.release(); 3095 mMediaRecorder = null; 3096 if (mRecordSurface != null) { 3097 mRecordSurface.release(); 3098 mRecordSurface = null; 3099 } 3100 } 3101 3102 Log.i(TAG, "Recording Done for quality: " + quality); 3103 3104 // Send VideoRecordingObject for further processing. 3105 VideoRecordingObject obj = new VideoRecordingObject(outputFilePath, 3106 quality, videoSize, camcorderProfile.videoFrameRate, fileFormat, zoomRatio, 3107 /*perFrameCaptureResults=*/ Collections.emptyList()); 3108 mSocketRunnableObj.sendVideoRecordingObject(obj); 3109 } 3110 3111 /** 3112 * Sets up a PreviewRecorder with a surface set up as a preview. 3113 * 3114 * This method sets up 2 surfaces: an {@link ImageReader} surface and a 3115 * {@link MediaRecorder} surface. 
     * The ImageReader surface is set up with
     * {@link HardwareBuffer#USAGE_COMPOSER_OVERLAY} and set as the target of one or many capture
     * requests created with {@link CameraDevice#TEMPLATE_PREVIEW}. This should force the HAL to
     * use the Preview pipeline and output to the ImageReader. An {@link ImageWriter} pipes the
     * images from ImageReader to the MediaRecorder surface which is encoded into a video.
     *
     * @param cmdObj         JSON command; must contain "cameraId" and "stabilizeMode", may
     *                       contain "aeTargetFpsMax" (defaults to 30 when absent or zero).
     * @param outputFilePath destination file for the encoded video.
     * @param videoSize      requested recording resolution.
     * @param hlg10Enabled   whether HLG10 (10-bit) recording was requested.
     * @throws ItsException if the API level, the stabilization mode, or 10-bit support
     *                      requirements are not met.
     */
    private PreviewRecorder getPreviewRecorder(JSONObject cmdObj, String outputFilePath,
            Size videoSize, boolean hlg10Enabled) throws ItsException, JSONException {
        String cameraId = cmdObj.getString("cameraId");
        int stabilizationMode = cmdObj.getInt("stabilizeMode");
        int aeTargetFpsMax = cmdObj.optInt("aeTargetFpsMax");

        // Preview recording relies on APIs introduced in Android 13 (T, API 33).
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.TIRAMISU) {
            throw new ItsException("Cannot record preview before API level 33");
        }

        if (!isVideoStabilizationModeSupported(stabilizationMode)) {
            throw new ItsException("Stabilization mode " + stabilizationMode +
                    " requested, but not supported by device.");
        }

        // HLG10 requires the DYNAMIC_RANGE_TEN_BIT capability to be advertised.
        int[] caps = mCameraCharacteristics.get(
                CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
        boolean support10Bit = (caps != null) && IntStream.of(caps).anyMatch(x -> x
                == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT);
        if (hlg10Enabled) {
            if (!support10Bit) {
                throw new ItsException("HLG10 requested, but 10-bit capability "
                        + "is not supported by device.");
            }
        }

        int cameraDeviceId = Integer.parseInt(cameraId);
        int sensorOrientation = Objects.requireNonNull(
                mCameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION),
                "Sensor orientation must not be null"
        );

        // By default aeTargetFpsMax is not set. In that case, default to 30
        if (aeTargetFpsMax == 0) {
            aeTargetFpsMax = 30;
        }
        return new PreviewRecorder(cameraDeviceId, videoSize, aeTargetFpsMax,
                sensorOrientation, outputFilePath, mCameraHandler, hlg10Enabled,
                getEncoderTimestampOffset(), this);
    }

    /**
     * Records a preview for a fixed duration ("recordingDuration", in seconds) without
     * changing any capture parameters while recording.
     */
    private void doStaticPreviewRecording(JSONObject cmdObj) throws JSONException, ItsException {
        int recordingDuration = cmdObj.getInt("recordingDuration");
        RecordingResultListener recordingResultListener = new RecordingResultListener();
        // PreviewSleepAction just waits out the requested duration (converted to ms here).
        mPreviewAction = new PreviewSleepAction(
                mCameraCharacteristics,
                mCameraHandler,
                recordingResultListener,
                recordingDuration * 1000L);
        doPreviewRecordingWithAction(cmdObj, mPreviewAction);
    }

    /**
     * Records a preview while a {@code PreviewDynamicZoomAction} ramps the zoom from
     * "zoomStart" to "zoomEnd" in "stepSize" increments, holding each step for
     * "stepDuration" (units defined by PreviewDynamicZoomAction — confirm there).
     */
    private void doDynamicZoomPreviewRecording(JSONObject cmdObj)
            throws JSONException, ItsException {
        double zoomStart = cmdObj.getDouble("zoomStart");
        double zoomEnd = cmdObj.getDouble("zoomEnd");
        double stepSize = cmdObj.getDouble("stepSize");
        long stepDuration = cmdObj.getLong("stepDuration");
        RecordingResultListener recordingResultListener = new RecordingResultListener();
        mPreviewAction = new PreviewDynamicZoomAction(
                mCameraCharacteristics,
                mCameraHandler,
                recordingResultListener,
                zoomStart,
                zoomEnd,
                stepSize,
                stepDuration);
        doPreviewRecordingWithAction(cmdObj, mPreviewAction);
    }

    /**
     * Records a preview while a {@code PreviewDynamicMeteringAction} applies the four
     * AE/AWB metering rectangles supplied in the command ("regionBlue", "regionLight",
     * "regionDark", "regionYellow"), each for "aeAwbRegionDuration".
     */
    private void doDynamicMeteringRegionPreviewRecording(JSONObject cmdObj)
            throws JSONException, ItsException {
        JSONArray aeAwbRegionOne = cmdObj.getJSONArray("regionBlue");
        JSONArray aeAwbRegionTwo = cmdObj.getJSONArray("regionLight");
        JSONArray aeAwbRegionThree = cmdObj.getJSONArray("regionDark");
        JSONArray aeAwbRegionFour = cmdObj.getJSONArray("regionYellow");
        long aeAwbRegionDuration = cmdObj.getLong("aeAwbRegionDuration");
        RecordingResultListener recordingResultListener = new RecordingResultListener();
        mPreviewAction = new PreviewDynamicMeteringAction(
                mCameraCharacteristics,
                mCameraHandler,
                recordingResultListener,
                aeAwbRegionOne,
                aeAwbRegionTwo,
                aeAwbRegionThree,
                aeAwbRegionFour,
                aeAwbRegionDuration);
        doPreviewRecordingWithAction(cmdObj, mPreviewAction);
    }

    /**
     * Records a video of a surface set up as a preview, performing an action while recording.
     *
     * Reads the recording configuration (stabilization, OIS, zoom, AE fps range, antibanding,
     * face detection, extra output specs) from {@code cmdObj}, records the preview into a
     * file, pairs every recorded frame with its capture result, and sends a
     * {@code VideoRecordingObject} back over the socket.
     */
    private void doPreviewRecordingWithAction(
            JSONObject cmdObj,
            IntraPreviewAction action)
            throws JSONException, ItsException {
        String cameraId = cmdObj.getString("cameraId");
        int stabilizationMode = cmdObj.getInt("stabilizeMode");
        boolean ois = cmdObj.getBoolean("ois");
        double zoomRatio = cmdObj.optDouble("zoomRatio");
        // Override with zoomStart if zoomRatio was not specified
        zoomRatio = (Double.isNaN(zoomRatio)) ? cmdObj.optDouble("zoomStart") : zoomRatio;
        boolean paddedFrames = cmdObj.optBoolean("paddedFrames", false);
        int aeTargetFpsMin = cmdObj.optInt("aeTargetFpsMin");
        int aeTargetFpsMax = cmdObj.optInt("aeTargetFpsMax");
        int aeAntibandingMode = cmdObj.optInt("aeAntibandingMode", -1);
        int faceDetectMode = cmdObj.optInt("faceDetectMode");
        // Record surface size and HDRness.
        JSONArray outputSpecs = ItsUtils.getOutputSpecs(cmdObj);
        if (outputSpecs == null || outputSpecs.length() == 0) {
            throw new ItsException("No output surfaces!");
        }
        int recordSurfaceIndex = cmdObj.optInt("recordSurfaceIndex", 0);
        if (recordSurfaceIndex >= outputSpecs.length()) {
            throw new ItsException("Invalid recording surface index " + recordSurfaceIndex);
        }
        JSONObject recordSurfaceObj = outputSpecs.getJSONObject(recordSurfaceIndex);
        String format = recordSurfaceObj.optString("format");
        if (!format.equals("priv")) {
            throw new ItsException("Record surface must be PRIV format!, but is " + format);
        }
        Size videoSize = new Size(
                recordSurfaceObj.getInt("width"),
                recordSurfaceObj.getInt("height"));
        boolean hlg10Enabled = recordSurfaceObj.optBoolean("hlg10");

        // Remove first output spec and use the rest to create ImageReaders
        List<OutputConfiguration> extraConfigs = null;
        outputSpecs.remove(recordSurfaceIndex);
        if (outputSpecs.length() > 0) {
            boolean is10bitOutputPresent = prepareImageReadersWithOutputSpecs(
                    outputSpecs, /*inputSize*/null, /*inputFormat*/0, /*maxInputBuffers*/0,
                    /*backgroundRequest*/false, /*reuseSession*/false);
            extraConfigs = getCaptureOutputConfigurations(outputSpecs, is10bitOutputPresent);
        }
        List<RecordingResult> recordingResults = new ArrayList<>();
        RecordingResultListener recordingResultListener = action.getRecordingResultListener();

        int fileFormat = MediaRecorder.OutputFormat.DEFAULT;
        int cameraDeviceId = Integer.parseInt(cameraId);
        String outputFilePath = getOutputMediaFile(cameraDeviceId, videoSize,
                /* quality= */"preview", fileFormat, hlg10Enabled, stabilizationMode,
                zoomRatio, aeTargetFpsMin, aeTargetFpsMax);
        assert outputFilePath != null;

        try (PreviewRecorder pr = getPreviewRecorder(cmdObj, outputFilePath, videoSize,
                hlg10Enabled)) {
            BlockingSessionCallback sessionListener = new BlockingSessionCallback();
            long dynamicRangeProfile = hlg10Enabled ? DynamicRangeProfiles.HLG10 :
                    DynamicRangeProfiles.STANDARD;
            pr.startRecording();
            if (paddedFrames) {
                Logt.v(TAG, "Record Green frames at the beginning of the video");
                pr.overrideCameraFrames(true);

                // MediaRecorder APIs don't specify whether they're synchronous or asynchronous,
                // and different vendors seem to have interpreted this differently. This delay
                // allows for MediaRecorder to complete the `startRecording` routine before
                // streaming frames from the camera. b/348332718
                try {
                    Thread.sleep(PADDED_FRAMES_MS);
                } catch (InterruptedException e) {
                    Logt.e(TAG, "Interrupted while waiting for MediaRecorder to prepare.", e);
                }
            }
            configureAndCreateCaptureSession(CameraDevice.TEMPLATE_PREVIEW,
                    pr.getCameraSurface(), stabilizationMode, ois, dynamicRangeProfile,
                    sessionListener, zoomRatio, aeTargetFpsMin, aeTargetFpsMax,
                    recordingResultListener, extraConfigs, aeAntibandingMode, faceDetectMode);
            if (paddedFrames) {
                Logt.v(TAG, "Wait " + PADDED_FRAMES_MS + " msec for Green frames for padding");
                try {
                    Thread.sleep(PADDED_FRAMES_MS);
                } catch (InterruptedException e) {
                    Logt.e(TAG, "Interrupted while waiting for green frames.", e);
                }

                Logt.v(TAG, "Record Camera frames after green frames");
                pr.overrideCameraFrames(false);
            }

            // Run the caller-supplied action (sleep, zoom ramp, metering changes, ...) while
            // the repeating request set up by configureAndCreateCaptureSession keeps running.
            action.execute();

            if (paddedFrames) {
                pr.overrideCameraFrames(true);
                try {
                    Logt.v(TAG, "Record Green frames at the end of the video.");
                    Thread.sleep(PADDED_FRAMES_MS);
                } catch (InterruptedException e) {
                    Logt.e(TAG, "Interrupted while waiting for green frames.", e);
                }
            }

            // Stop repeating request and ensure frames in flight are sent to MediaRecorder
            mSession.stopRepeating();
            sessionListener.getStateWaiter().waitForState(
                    BlockingSessionCallback.SESSION_READY, TIMEOUT_SESSION_READY);
            try {
                Logt.v(TAG, "Wait for recording to finish.");
                Thread.sleep(PADDED_FRAMES_MS * 2);
            } catch (InterruptedException e) {
                Logt.e(TAG, "Interrupted while waiting for recording to complete.", e);
            }
            pr.stopRecording();
            mSession.close();

            // Pair every recorded frame timestamp with its capture result. A missing result
            // is treated as a hard failure because per-frame metadata is sent downstream.
            int frameNum = 1;
            for (Long timestamp : pr.getFrameTimeStamps()) {
                if (recordingResultListener.getCaptureResultsMap().containsKey(timestamp)) {
                    RecordingResult result = recordingResultListener.getCaptureResultsMap().get(
                            timestamp);
                    recordingResults.add(result);
                    Logt.v(TAG, "Frame# " + frameNum + " timestamp: " + timestamp + " cr = "
                            + result.mMap.values());
                    recordingResultListener.getCaptureResultsMap().remove(timestamp);
                } else {
                    throw new ItsException("Frame# " + frameNum
                            + " No RecordingResult found for timestamp: " + timestamp);
                }
                frameNum++;
            }
        } catch (CameraAccessException e) {
            throw new ItsException("Error configuring and creating capture request", e);
        } catch (InterruptedException e) {
            throw new ItsException("Interrupted while recording preview", e);
        } catch (IllegalStateException e) {
            closeCameraDevice();
            throw new ItsException("Illegal session state exception", e);
        }

        Log.i(TAG, "Preview recording complete: " + outputFilePath);
        // Send VideoRecordingObject for further processing.
        VideoRecordingObject obj = new VideoRecordingObject(outputFilePath, /* quality= */"preview",
                videoSize, fileFormat, zoomRatio, recordingResults);
        mSocketRunnableObj.sendVideoRecordingObject(obj);
    }

    /**
     * Captures the nth frame of a surface set up as a preview.
     *
     * This method sets up an {@link ImageReader} surface.
     * The ImageReader surface is set up with
     * {@link HardwareBuffer#USAGE_COMPOSER_OVERLAY} and set as the target of a capture request
     * created with {@link CameraDevice#TEMPLATE_PREVIEW}. This should force the HAL to use the
     * Preview pipeline and output to the ImageReader.
     **/
    private void doCapturePreviewFrame(JSONObject params)
            throws org.json.JSONException, ItsException {
        int cameraDeviceId = Integer.parseInt(params.getString("cameraId"));
        Size previewSize = Size.parseSize(params.getString("previewSize"));
        int frameNumToCapture = params.getInt("frameNum");
        int extension = params.getInt("extension");

        Log.i(TAG, "doCapturePreviewFrame [start] cameraId: " + cameraDeviceId
                + " previewSize: " + previewSize + " frameNum: " + frameNumToCapture
                + " extension: " + extension);

        int sensorOrientation = mCameraCharacteristics.get(
                CameraCharacteristics.SENSOR_ORIENTATION);

        // We don't invoke recording but a valid file is still required
        String quality = "preview";
        int fileFormat = MediaRecorder.OutputFormat.DEFAULT;
        int stabilizationMode = CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        float zoomRatio = 1.0f;
        String outputFilePath = getOutputMediaFile(cameraDeviceId, previewSize,
                quality, fileFormat, stabilizationMode, zoomRatio);
        assert outputFilePath != null;

        int aeTargetFpsMax = 30;
        // 3A results are suppressed for the duration of this capture and restored in finally.
        boolean prevSend3AResults = mSend3AResults;
        try (PreviewRecorder pr = new PreviewRecorder(cameraDeviceId, previewSize, aeTargetFpsMax,
                sensorOrientation, outputFilePath, mCameraHandler, /*hlg10Enabled*/false,
                getEncoderTimestampOffset(), this)) {
            CaptureRequest.Builder reqBuilder = mCamera.createCaptureRequest(
                    CameraDevice.TEMPLATE_PREVIEW);
            JSONObject captureReqJSON = params.getJSONObject("captureRequest");
            // Create deep copy of the original capture request. The deserialize operation strips
            // keys. The deep copy preserves the keys.
            JSONObject threeAReqJSON = new JSONObject(captureReqJSON.toString());
            reqBuilder = ItsSerializer.deserialize(reqBuilder, captureReqJSON);
            CaptureRequest.Builder threeAReqBuilder = mCamera.createCaptureRequest(
                    CameraDevice.TEMPLATE_PREVIEW);
            threeAReqBuilder = ItsSerializer.deserialize(threeAReqBuilder, threeAReqJSON);
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            // Do not send 3A results
            mSend3AResults = false;

            // If extension is -1 then use Camera2
            if (extension == -1) {
                capturePreviewFrame(
                        reqBuilder,
                        threeAReqBuilder,
                        frameNumToCapture,
                        pr,
                        outputStream);
            } else {
                capturePreviewFrameWithExtension(
                        reqBuilder,
                        threeAReqBuilder,
                        frameNumToCapture,
                        pr,
                        outputStream,
                        extension);
            }

            Log.i(TAG, "Preview frame capture complete");
            mSocketRunnableObj.sendResponseCaptureBuffer("jpegImage",
                    ByteBuffer.wrap(outputStream.toByteArray()));
        } catch (CameraAccessException e) {
            Log.e(TAG, "doCapturePreviewFrame [error]", e);
            throw new ItsException("Error configuring and creating capture request", e);
        } catch (InterruptedException e) {
            Log.e(TAG, "doCapturePreviewFrame [error]", e);
            throw new ItsException("Interrupted while recording preview", e);
        } finally {
            mSend3AResults = prevSend3AResults;
        }
    }

    /**
     * Streams preview frames in a regular (non-extension) Camera2 session, runs an AF/AE
     * precapture sequence, then lets {@code frameNumToCapture + 1} frames go by before
     * pulling a frame from the PreviewRecorder into {@code outputStream}.
     */
    private void capturePreviewFrame(CaptureRequest.Builder reqBuilder,
            CaptureRequest.Builder threeAReqBuilder, int frameNumToCapture,
            PreviewRecorder pr, OutputStream outputStream)
            throws ItsException, CameraAccessException, InterruptedException {
        Log.d(TAG, "capturePreviewFrame [start]");
        // The latch counts frameNumToCapture + 1 results; the listener counts it down.
        CountDownLatch frameNumLatch = new CountDownLatch(frameNumToCapture + 1);
        PreviewFrameCaptureResultListener captureResultListener =
                new PreviewFrameCaptureResultListener(frameNumLatch);

        Surface
        surface = pr.getCameraSurface();
        reqBuilder.addTarget(surface);
        reqBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
        OutputConfiguration outConfig = new OutputConfiguration(surface);

        // Advertise the PREVIEW stream use case when the device supports it.
        long[] availableStreamUseCases = mCameraCharacteristics.get(
                CameraCharacteristics.SCALER_AVAILABLE_STREAM_USE_CASES);
        long previewStreamUseCase =
                Long.valueOf(CameraCharacteristics.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW);
        if (availableStreamUseCases != null && Longs.asList(availableStreamUseCases).contains(
                previewStreamUseCase)) {
            outConfig.setStreamUseCase(
                    CameraCharacteristics.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW);
        }

        BlockingSessionCallback sessionListener = new BlockingSessionCallback();
        SessionConfiguration sessionConfiguration = new SessionConfiguration(
                SessionConfiguration.SESSION_REGULAR, List.of(outConfig),
                new HandlerExecutor(mCameraHandler),
                // Forward all state changes to sessionListener; also publish the configured
                // session through the mSession field used by the rest of this method.
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(CameraCaptureSession session) {
                        mSession = session;
                        sessionListener.onConfigured(session);
                    }

                    @Override
                    public void onReady(CameraCaptureSession session) {
                        sessionListener.onReady(session);
                    }

                    @Override
                    public void onConfigureFailed(CameraCaptureSession session) {
                        Log.i(TAG, "CameraCaptureSession configuration failed.");
                        sessionListener.onConfigureFailed(session);
                    }

                    @Override
                    public void onClosed(CameraCaptureSession session) {
                        sessionListener.onClosed(session);
                    }
                });

        // Create capture session
        mCamera.createCaptureSession(sessionConfiguration);

        sessionListener.getStateWaiter().waitForState(
                BlockingSessionCallback.SESSION_READY, TIMEOUT_SESSION_READY);
        try {
            ThreeAResultListener threeAListener = new ThreeAResultListener();
            Logt.i(TAG, "Triggering precapture sequence");

            mSession.setRepeatingRequest(reqBuilder.build(), threeAListener,
                    mCameraHandler);
            synchronized (m3AStateLock) {
                mPrecaptureTriggered = false;
                mConvergeAETriggered = false;
            }

            // One-shot request that starts the AF trigger and AE precapture sequence.
            threeAReqBuilder.addTarget(surface);
            threeAReqBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                    CameraMetadata.CONTROL_AF_TRIGGER_START);
            threeAReqBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                    CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);

            mSession.capture(threeAReqBuilder.build(), threeAListener,
                    mCameraHandler);
            mInterlock3A.open();
            // Block (via mInterlock3A) until the precapture trigger is observed in results,
            // giving up after TIMEOUT_3A seconds.
            long tstart = System.currentTimeMillis();
            while (!mPrecaptureTriggered) {
                if (!mInterlock3A.block(TIMEOUT_3A * 1000)
                        || System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
                    throw new ItsException(
                            "3A failed to converge after " + TIMEOUT_3A + " seconds.\n"
                            + "AE converge state: " + mConvergedAE + ".");
                }
            }

            // Then wait for AE to report convergence, with the same timeout.
            tstart = System.currentTimeMillis();
            while (!mConvergeAETriggered) {
                if (!mInterlock3A.block(TIMEOUT_3A * 1000)
                        || System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
                    throw new ItsException(
                            "3A failed to converge after " + TIMEOUT_3A + " seconds.\n"
                            + "AE converge state: " + mConvergedAE + ".");
                }
            }
            mInterlock3A.close();
            Logt.i(TAG, "AE state after precapture sequence: " + mConvergeAETriggered);
            threeAListener.stop();
        } catch (CameraAccessException e) {
            // NOTE(review): precapture failures are only logged here; the frame capture
            // below still proceeds — confirm this best-effort behavior is intended.
            Log.e(TAG, "CameraCaptureSession configuration failed.", e);
        }

        Log.d(TAG, "capturePreviewFrame [waiting for " + frameNumToCapture + " frames]");
        // Wait until the requested number of frames have been received and then capture the frame
        mSession.setRepeatingRequest(reqBuilder.build(), captureResultListener,
                mCameraHandler);
        // NOTE(review): the boolean result of await() is discarded, so a timeout falls
        // through silently and the most recent frame is used — confirm intended.
        frameNumLatch.await(TIMEOUT_CAPTURE_PREVIEW_FRAME_SECONDS, TimeUnit.SECONDS);
        mSocketRunnableObj.sendResponseCaptureResult(
                captureResultListener.mCaptureRequest,
                captureResultListener.mCaptureResult,
                new ImageReader[] {});
        Log.d(TAG, "capturePreviewFrame [getting frame]");
        pr.getFrame(outputStream);

        // Stop repeating request
        Log.d(TAG, "capturePreviewFrame [stopping repeating request]");
        mSession.stopRepeating();
        mSession.close();
        // NOTE(review): waits on BlockingExtensionSessionCallback.SESSION_CLOSED for a
        // non-extension session — presumably the constant matches BlockingSessionCallback's
        // SESSION_CLOSED value; verify.
        sessionListener.getStateWaiter().waitForState(
                BlockingExtensionSessionCallback.SESSION_CLOSED, TIMEOUT_SESSION_CLOSE);
        Log.d(TAG, "capturePreviewFrame [end]");
    }

    /**
     * Same flow as {@link #capturePreviewFrame} but through a
     * {@link CameraExtensionSession} for the given {@code extension}.
     */
    private void capturePreviewFrameWithExtension(CaptureRequest.Builder reqBuilder,
            CaptureRequest.Builder threeAReqBuilder, int frameNumToCapture,
            PreviewRecorder pr, OutputStream outputStream, int extension)
            throws CameraAccessException, InterruptedException, ItsException {
        Log.d(TAG, "capturePreviewFrameWithExtension [start]");

        Surface surface = pr.getCameraSurface();
        reqBuilder.addTarget(surface);
        reqBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);

        // The latch counts frameNumToCapture + 1 results; the listener counts it down.
        CountDownLatch frameNumLatch = new CountDownLatch(frameNumToCapture + 1);
        ExtensionPreviewFrameCaptureResultListener captureResultListener =
                new ExtensionPreviewFrameCaptureResultListener(frameNumLatch, extension);

        BlockingExtensionSessionCallback sessionListener =
                new BlockingExtensionSessionCallback();
        Log.d(TAG, "capturePreviewFrameWithExtension [config and create extension session]");
        configureAndCreateExtensionSession(
                /* previewSurface */ surface,
                /* captureSurface */ null,
                extension,
                sessionListener);

        Log.d(TAG, "capturePreviewFrameWithExtension [start extension session]");
        mExtensionSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS_EXTENSIONS);
        Executor executor = new HandlerExecutor(mResultHandler);
        try {
            ExtensionsThreeAResultListener threeAListener = new ExtensionsThreeAResultListener();
            Logt.i(TAG, "Triggering precapture sequence");
            mExtensionSession.setRepeatingRequest(reqBuilder.build(), executor,
                    threeAListener);
            synchronized (m3AStateLock) {
                mPrecaptureTriggered = false;
                mConvergeAETriggered = false;
            }

            // One-shot request that starts the AF trigger and AE precapture sequence.
            threeAReqBuilder.addTarget(surface);
            threeAReqBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                    CameraMetadata.CONTROL_AF_TRIGGER_START);
            threeAReqBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                    CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);

            mExtensionSession.capture(threeAReqBuilder.build(), executor,
                    threeAListener);
            mInterlock3A.open();

            // Block until the precapture trigger is observed, then until AE converges;
            // either wait gives up after TIMEOUT_3A seconds.
            long tstart = System.currentTimeMillis();
            while (!mPrecaptureTriggered) {
                if (!mInterlock3A.block(TIMEOUT_3A * 1000)
                        || System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
                    throw new ItsException(
                            "3A failed to converge after " + TIMEOUT_3A + " seconds.\n"
                            + "AE converge state: " + mConvergedAE + ".");
                }
            }

            tstart = System.currentTimeMillis();
            while (!mConvergeAETriggered) {
                if (!mInterlock3A.block(TIMEOUT_3A * 1000)
                        || System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
                    throw new ItsException(
                            "3A failed to converge after " + TIMEOUT_3A + " seconds.\n"
                            + "AE converge state: " + mConvergedAE + ".");
                }
            }
            mInterlock3A.close();
            Logt.i(TAG, "AE state after precapture sequence: " + mConvergeAETriggered);
            threeAListener.stop();
        } catch (CameraAccessException e) {
            // NOTE(review): precapture failures are only logged; the capture below still
            // proceeds — confirm this best-effort behavior is intended.
            Log.e(TAG, "CameraCaptureSession configuration failed.", e);
        }

        mExtensionSession.setRepeatingRequest(reqBuilder.build(),
                executor,
                captureResultListener);

        Log.d(TAG, "capturePreviewFrameWithExtension [wait for " + frameNumToCapture + " frames]");
        // Wait until the requested number of frames have been received and then capture the frame
        // NOTE(review): the await() result is discarded — a timeout falls through silently.
        frameNumLatch.await(TIMEOUT_CAPTURE_PREVIEW_FRAME_SECONDS, TimeUnit.SECONDS);
        mSocketRunnableObj.sendResponseCaptureResult(
                captureResultListener.mCaptureRequest,
                captureResultListener.mCaptureResult,
                new ImageReader[] {});

        Log.d(TAG, "capturePreviewFrameWithExtension [getting frame]");
        pr.getFrame(outputStream);

        Log.d(TAG, "capturePreviewFrameWithExtension [stop repeating request]");
        mExtensionSession.stopRepeating();
        mExtensionSession.close();

        sessionListener.getStateWaiter().waitForState(
                BlockingExtensionSessionCallback.SESSION_CLOSED, TIMEOUT_SESSION_CLOSE);
        Log.d(TAG, "capturePreviewFrameWithExtension [end]");
    }

    /**
     * Picks the extension-supported SurfaceTexture preview size whose pixel area is
     * closest to {@code captureSize}.
     *
     * @return the closest supported size, or {@code null} when {@code captureSize} is not a
     *         valid SurfaceTexture output size. NOTE(review): callers must handle the null
     *         return; also the unchecked Optional.get() assumes extensionSizes is non-empty
     *         — confirm for all extensions.
     */
    private Size pickPreviewResolution(Size captureSize, int extension) {
        int captureWidth = captureSize.getWidth();
        int captureHeight = captureSize.getHeight();
        List<Size> extensionSizes = mCameraExtensionCharacteristics.getExtensionSupportedSizes(
                extension, SurfaceTexture.class);
        StreamConfigurationMap configMap = mCameraCharacteristics.get(
                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        Size[] outputSizes = configMap.getOutputSizes(SurfaceTexture.class);
        if (outputSizes == null || !Arrays.asList(outputSizes).contains(captureSize)) {
            Log.i(TAG, "Failed to find valid output size");
            return null;
        }
        Log.i(TAG, "outputSizes: " + Arrays.toString(outputSizes));
        Log.i(TAG, "extensionSizes: " + extensionSizes.toString());
        // Pick preview size close to capture size, based on area
        Size previewSize = extensionSizes.stream()
                .distinct()
                .min(Comparator.comparingInt(s -> Math.abs(
                        (s.getWidth() * s.getHeight()) - (captureWidth * captureHeight))))
                .get();
        Log.i(TAG, "previewSize for extension " + String.valueOf(extension) +
                ": " + previewSize.toString());
        return previewSize;
    }

    private Surface
configureAndCreateExtensionSession(
            Surface captureSurface,
            int extension,
            CameraExtensionSession.StateCallback stateCallback) throws ItsException {
        // Convenience overload: no caller-supplied preview surface, so one is created
        // internally by the full overload below.
        return configureAndCreateExtensionSession(
                null,
                captureSurface,
                extension,
                stateCallback);
    }

    /**
     * Configures output surfaces and creates a {@link CameraExtensionSession}.
     *
     * @param previewSurface preview output; when null, an internal PRIVATE-format
     *                       ImageReader sized by {@link #pickPreviewResolution} is created
     *                       and its surface used instead.
     * @param captureSurface optional still-capture output; when non-null it is added as an
     *                       output sized from mOutputImageReaders[0].
     * @param extension      the camera extension to enable.
     * @param stateCallback  receives session lifecycle callbacks.
     * @return the preview surface actually used (caller-supplied or internally created).
     * @throws ItsException if session creation fails.
     */
    private Surface configureAndCreateExtensionSession(
            Surface previewSurface,
            Surface captureSurface,
            int extension,
            CameraExtensionSession.StateCallback stateCallback) throws ItsException {
        List<OutputConfiguration> outputConfig = new ArrayList<>();
        Size captureSize = null;
        if (captureSurface != null) {
            int captureWidth = mOutputImageReaders[0].getWidth();
            int captureHeight = mOutputImageReaders[0].getHeight();
            captureSize = new Size(captureWidth, captureHeight);
            Log.i(TAG, "Capture size: " + captureSize);
            outputConfig.add(new OutputConfiguration(captureSurface));
        }

        if (previewSurface == null) {
            // NOTE(review): pickPreviewResolution may return null (invalid captureSize),
            // which would NPE on previewSize.getWidth() below — confirm callers guarantee
            // a valid capture size when previewSurface is null.
            Size previewSize = pickPreviewResolution(captureSize, extension);
            mExtensionPreviewImageReader = ImageReader.newInstance(
                    previewSize.getWidth(),
                    previewSize.getHeight(),
                    ImageFormat.PRIVATE,
                    MAX_CONCURRENT_READER_BUFFERS,
                    HardwareBuffer.USAGE_CPU_READ_OFTEN | HardwareBuffer.USAGE_COMPOSER_OVERLAY);
            previewSurface = mExtensionPreviewImageReader.getSurface();
        }

        outputConfig.add(new OutputConfiguration(previewSurface));

        ExtensionSessionConfiguration extSessionConfig = new ExtensionSessionConfiguration(
                extension, outputConfig,
                new HandlerExecutor(mCameraHandler),
                stateCallback);
        // Create capture session
        try {
            mCamera.createExtensionSession(extSessionConfig);
        } catch (CameraAccessException e) {
            throw new ItsException("Error creating extension session: " + e);
        }
        return previewSurface;
    }

    /**
     * Builds a repeating capture request for {@code recordSurface} (plus any
     * {@code extraConfigs}), applies the requested zoom / AE fps range / antibanding /
     * face-detect / stabilization / OIS settings, and creates the capture session. The
     * repeating request is started from the session's onConfigured callback; the session is
     * published through the {@code mSession} field.
     */
    private void configureAndCreateCaptureSession(int requestTemplate, Surface recordSurface,
            int videoStabilizationMode, boolean ois, long dynamicRangeProfile,
            CameraCaptureSession.StateCallback stateCallback,
            double zoomRatio, int aeTargetFpsMin, int aeTargetFpsMax,
            CameraCaptureSession.CaptureCallback captureCallback,
            List<OutputConfiguration> extraConfigs,
            int aeAntibandingMode, int faceDetectMode) throws CameraAccessException {
        assert (recordSurface != null);
        // Create capture request builder
        mCaptureRequestBuilder = mCamera.createCaptureRequest(requestTemplate);

        // handle optional arguments: NaN zoom, non-positive fps bounds, -1 antibanding and
        // 0 face-detect mode all mean "leave the template default in place".
        if (!Double.isNaN(zoomRatio)) {
            Logt.i(TAG, "zoomRatio set to " + zoomRatio);
            mCaptureRequestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, (float) zoomRatio);
        }
        if (aeTargetFpsMin > 0 && aeTargetFpsMax > 0) {
            Logt.i(TAG, "AE target FPS range: (" + aeTargetFpsMin + ", " + aeTargetFpsMax + ")");
            mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
                    new Range<Integer>(aeTargetFpsMin, aeTargetFpsMax));
        }
        if (aeAntibandingMode != -1) {
            Logt.i(TAG, "AE Antibanding Mode: " + aeAntibandingMode);
            mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
                    aeAntibandingMode);
        }
        if (faceDetectMode > 0) {
            Logt.i(TAG, "Face Detection Mode: " + faceDetectMode);
            mCaptureRequestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE,
                    faceDetectMode);
        }

        switch (videoStabilizationMode) {
            case CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON:
                mCaptureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                        CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON);
                Log.i(TAG, "Turned ON video stabilization.");
                break;
            case CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION:
                mCaptureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                        CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION);
                Log.i(TAG, "Turned ON preview stabilization.");
                break;
            case CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF:
                mCaptureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                        CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
                Log.i(TAG, "Turned OFF video stabilization.");
                break;
            default:
                Log.w(TAG, "Invalid video stabilization mode " + videoStabilizationMode
                        + ". Leaving unchanged.");
                break;
        }
        Log.i(TAG, "ois = " + ois);
        if (ois) {
            mCaptureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
                    CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
        } else {
            mCaptureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
                    CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);
        }
        mCaptureRequestBuilder.addTarget(recordSurface);
        // The record surface carries the requested dynamic range profile (e.g. HLG10).
        List<OutputConfiguration> configs = new ArrayList<OutputConfiguration>();
        OutputConfiguration outConfig = new OutputConfiguration(recordSurface);
        outConfig.setDynamicRangeProfile(dynamicRangeProfile);
        configs.add(outConfig);
        if (extraConfigs != null) {
            configs.addAll(extraConfigs);
        }

        SessionConfiguration sessionConfiguration = new SessionConfiguration(
                SessionConfiguration.SESSION_REGULAR, configs,
                new HandlerExecutor(mCameraHandler),
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(@NonNull CameraCaptureSession session) {
                        mSession = session;
                        // Hand the live session to any pending preview action before the
                        // repeating request starts.
                        if (mPreviewAction != null) {
                            mPreviewAction.setSession(session);
                            mPreviewAction.setCaptureRequestBuilder(mCaptureRequestBuilder);
                        }
                        try {
                            mSession.setRepeatingRequest(mCaptureRequestBuilder.build(),
                                    captureCallback, mResultHandler);
                        } catch (CameraAccessException e) {
                            Log.e(TAG, "CameraCaptureSession configuration failed.", e);
                        }
                    }

                    @Override
                    public void onReady(CameraCaptureSession session) {
                        if (stateCallback != null) {
                            stateCallback.onReady(session);
                        }
                    }

                    @Override
                    public void onConfigureFailed(CameraCaptureSession session) {
                        // NOTE(review): configure failure is only logged and not forwarded
                        // to stateCallback — verify callers time out appropriately.
                        Log.i(TAG, "CameraCaptureSession configuration failed.");
                    }

                    @Override
                    public void onClosed(CameraCaptureSession session) {
                        if (stateCallback != null) {
                            stateCallback.onClosed(session);
                        }
                    }
                });

        // Create capture session
        mCamera.createCaptureSession(sessionConfiguration);
    }

    // Returns the default camcorder profile for the given camera at the given quality level.
    // Each CamcorderProfile has duration, quality, fileFormat, videoCodec, videoBitRate,
    // videoFrameRate, videoWidth, videoHeight, audioCodec, audioBitRate, audioSampleRate
    // and audioChannels.
    private CamcorderProfile getCamcorderProfile(int cameraId, int profileId) {
        CamcorderProfile camcorderProfile = CamcorderProfile.get(cameraId, profileId);
        return camcorderProfile;
    }

    // This method should be called before preparing MediaRecorder.
    // Set video and audio source should be done before setting the CamcorderProfile.
    // Output file path should be set after setting the CamcorderProfile.
    // These events should always be done in this particular order.
    private void setupMediaRecorderWithProfile(CamcorderProfile camcorderProfile,
            String outputFilePath) {
        mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
        mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
        mMediaRecorder.setProfile(camcorderProfile);
        mMediaRecorder.setOutputFile(outputFilePath);
    }

    // Convenience overload: SDR recording with no explicit AE fps range in the file name.
    private String getOutputMediaFile(int cameraId, Size videoSize, String quality,
            int fileFormat, int stabilizationMode, double zoomRatio) {
        return getOutputMediaFile(cameraId, videoSize, quality, fileFormat,
                /* hlg10Enabled= */false, stabilizationMode, zoomRatio, /* minFps */0,
                /* maxFps */0);
    }

    // Convenience overload: no explicit AE fps range in the file name.
    private String getOutputMediaFile(int cameraId, Size videoSize, String quality,
            int fileFormat, boolean hlg10Enabled, int stabilizationMode, double zoomRatio) {
        return getOutputMediaFile(cameraId, videoSize, quality, fileFormat,
                hlg10Enabled, stabilizationMode, zoomRatio, /* minFps */0,
                /* maxFps */0);
    }

    // Builds the output path for a recording, encoding camera id, quality, size, zoom,
    // HDR, stabilization, and fps range into the file name. Returns null when the
    // storage directory cannot be created.
    private String getOutputMediaFile(int cameraId, Size videoSize, String quality,
            int fileFormat, boolean hlg10Enabled, int stabilizationMode, double zoomRatio,
            int minFps, int maxFps) {
        // If any quality has file format other than 3gp and webm then the
        // recording file will have mp4 as default extension.
        String fileExtension = "";
        if (fileFormat == MediaRecorder.OutputFormat.THREE_GPP) {
            fileExtension = ".3gp";
        } else if (fileFormat == MediaRecorder.OutputFormat.WEBM) {
            fileExtension = ".webm";
        } else {
            fileExtension = ".mp4";
        }
        // All the video recordings will be available in VideoITS directory on device.
3902 File mediaStorageDir = new File(getExternalFilesDir(null), "VideoITS"); 3903 if (mediaStorageDir == null) { 3904 Log.e(TAG, "Failed to retrieve external files directory."); 3905 return null; 3906 } 3907 if (!mediaStorageDir.exists()) { 3908 if (!mediaStorageDir.mkdirs()) { 3909 Log.d(TAG, "Failed to create media storage directory."); 3910 return null; 3911 } 3912 } 3913 String timestamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date()); 3914 String fileName = mediaStorageDir.getPath() + File.separator + 3915 "VID_" + timestamp + '_' + cameraId + '_' + quality + '_' + 3916 videoSize; 3917 if (!Double.isNaN(zoomRatio)) { 3918 fileName += "_" + zoomRatio; 3919 } 3920 if (hlg10Enabled) { 3921 fileName += "_hlg10"; 3922 } 3923 if (stabilizationMode != CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF){ 3924 fileName += "_stabilized"; 3925 } 3926 if (minFps > 0 && maxFps > 0) { 3927 fileName += "_" + minFps + "_" + maxFps; 3928 } 3929 3930 File mediaFile = new File(fileName); 3931 return mediaFile + fileExtension; 3932 } 3933 3934 private void doCaptureWithFlash(JSONObject params) throws ItsException { 3935 // Parse the json to get the capture requests 3936 List<CaptureRequest.Builder> previewStartRequests = ItsSerializer.deserializeRequestList( 3937 mCamera, params, "previewRequestStart"); 3938 List<CaptureRequest.Builder> previewIdleRequests = ItsSerializer.deserializeRequestList( 3939 mCamera, params, "previewRequestIdle"); 3940 List<CaptureRequest.Builder> stillCaptureRequests = ItsSerializer.deserializeRequestList( 3941 mCamera, params, "stillCaptureRequest"); 3942 3943 mCaptureResults = new CaptureResult[2]; 3944 3945 ThreeAResultListener threeAListener = new ThreeAResultListener(); 3946 List<OutputConfiguration> outputConfigs = new ArrayList<OutputConfiguration>(); 3947 SurfaceTexture preview = new SurfaceTexture(/*random int*/ 1); 3948 Surface previewSurface = new Surface(preview); 3949 try { 3950 mSessionListener = new 
BlockingSessionCallback(); 3951 try { 3952 mCountCapRes.set(0); 3953 mCountJpg.set(0); 3954 JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params); 3955 prepareImageReadersWithOutputSpecs(jsonOutputSpecs, /*inputSize*/null, 3956 /*inputFormat*/0, /*maxInputBuffers*/0, false, /*reuseSession*/ false); 3957 3958 outputConfigs.add(new OutputConfiguration(mOutputImageReaders[0].getSurface())); 3959 outputConfigs.add(new OutputConfiguration(previewSurface)); 3960 mCamera.createCaptureSessionByOutputConfigurations( 3961 outputConfigs, mSessionListener, mCameraHandler); 3962 mSession = mSessionListener.waitAndGetSession(TIMEOUT_IDLE_MS); 3963 ImageReader.OnImageAvailableListener readerListener = 3964 createAvailableListener(mCaptureCallback); 3965 mOutputImageReaders[0].setOnImageAvailableListener(readerListener, 3966 mSaveHandlers[0]); 3967 } catch (Exception e) { 3968 throw new ItsException("Error configuring outputs", e); 3969 } 3970 CaptureRequest.Builder previewIdleReq = previewIdleRequests.get(0); 3971 previewIdleReq.addTarget(previewSurface); 3972 mSession.setRepeatingRequest(previewIdleReq.build(), threeAListener, mResultHandler); 3973 Logt.i(TAG, "Triggering precapture sequence"); 3974 mPrecaptureTriggered = false; 3975 CaptureRequest.Builder previewStartReq = previewStartRequests.get(0); 3976 previewStartReq.addTarget(previewSurface); 3977 mSession.capture(previewStartReq.build(), threeAListener ,mResultHandler); 3978 mInterlock3A.open(); 3979 synchronized(m3AStateLock) { 3980 mPrecaptureTriggered = false; 3981 mConvergeAETriggered = false; 3982 } 3983 long tstart = System.currentTimeMillis(); 3984 while (!mPrecaptureTriggered) { 3985 if (!mInterlock3A.block(TIMEOUT_3A * 1000) || 3986 System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) { 3987 throw new ItsException ( 3988 "AE state is " + CaptureResult.CONTROL_AE_STATE_PRECAPTURE + 3989 "after " + TIMEOUT_3A + " seconds."); 3990 } 3991 } 3992 mConvergeAETriggered = false; 3993 3994 tstart = 
System.currentTimeMillis();
            // Wait (bounded by TIMEOUT_3A seconds) for AE to converge after the
            // precapture sequence before taking the still capture.
            while (!mConvergeAETriggered) {
                if (!mInterlock3A.block(TIMEOUT_3A * 1000) ||
                        System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
                    throw new ItsException (
                            "3A failed to converge after " + TIMEOUT_3A + " seconds.\n" +
                            "AE converge state: " + mConvergedAE + ".");
                }
            }
            mInterlock3A.close();
            Logt.i(TAG, "AE state after precapture sequence: " + mConvergeAETriggered);
            threeAListener.stop();

            // Send a still capture request
            CaptureRequest.Builder stillCaptureRequest = stillCaptureRequests.get(0);
            int aeMode = stillCaptureRequest.get(CaptureRequest.CONTROL_AE_MODE);
            Logt.i(TAG, String.format("Taking still capture with AE_MODE: %d", aeMode));
            stillCaptureRequest.addTarget(mOutputImageReaders[0].getSurface());
            mSession.capture(stillCaptureRequest.build(), mCaptureResultListener, mResultHandler);
            // One callback expected: the still image arriving on the reader.
            mCountCallbacksRemaining.set(1);
            long timeout = TIMEOUT_CALLBACK * 1000;
            waitForCallbacks(timeout);
            mSession.stopRepeating();
        } catch (android.hardware.camera2.CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        } finally {
            // Always release the session and the locally created preview surfaces.
            if (mSession != null) {
                mSession.close();
            }
            previewSurface.release();
            preview.release();
        }
    }

    // Takes a single capture through a CameraExtensionSession for the given
    // extension mode, optionally waiting for AE convergence first.
    private void doCaptureWithExtensions(JSONObject params, int extension) throws ItsException {
        try {
            // Parse the JSON to get the list of capture requests.
            List<CaptureRequest.Builder> requests = ItsSerializer.deserializeRequestList(
                    mCamera, params, "captureRequests");

            BlockingExtensionSessionCallback sessionListener =
                    new BlockingExtensionSessionCallback();
            // Reset all per-capture counters and RAW-mode flags before starting.
            mCountRawOrDng.set(0);
            mCountJpg.set(0);
            mCountYuv.set(0);
            mCountRaw10.set(0);
            mCountRaw12.set(0);
            mCountCapRes.set(0);
            mCountRaw10QuadBayer.set(0);
            mCountRaw10Stats.set(0);
            mCountRaw10QuadBayerStats.set(0);
            mCountRaw.set(0);
            mCountRawQuadBayer.set(0);
            mCountRawStats.set(0);
            mCountRawQuadBayerStats.set(0);

            mCaptureRawIsDng = false;
            mCaptureRawIsStats = false;
            mCaptureRawIsQuadBayer = false;
            mCaptureRawIsQuadBayerStats = false;
            mCaptureResults = new CaptureResult[requests.size()];

            JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);

            prepareImageReadersWithOutputSpecs(jsonOutputSpecs,
                    /*inputSize*/null, /*inputFormat*/0, /*maxInputBuffers*/0,
                    /*backgroundRequest*/ false, /*reuseSession*/ false);

            configureAndCreateExtensionSession(
                    mOutputImageReaders[0].getSurface(),
                    extension,
                    sessionListener);

            mExtensionSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS_EXTENSIONS);

            CaptureRequest.Builder captureBuilder = requests.get(0);

            if (params.optBoolean("waitAE", true)) {
                if (mExtensionPreviewImageReader == null) {
                    throw new ItsException("Preview ImageReader has not been initialized!");
                }
                // Set repeating request and wait for AE convergence, using another ImageReader.
                Logt.i(TAG, "Waiting for AE to converge before taking extensions capture.");
                CaptureRequest.Builder previewRequestBuilder = mCamera.createCaptureRequest(
                        CameraDevice.TEMPLATE_PREVIEW);
                previewRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
                        CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
                previewRequestBuilder.addTarget(mExtensionPreviewImageReader.getSurface());
                // Preview frames are dropped; only the AE result matters here.
                ImageReader.OnImageAvailableListener dropperListener =
                        createAvailableListenerDropper();
                mExtensionPreviewImageReader.setOnImageAvailableListener(dropperListener,
                        mSaveHandlers[0]);
                mExtensionSession.setRepeatingRequest(previewRequestBuilder.build(),
                        new HandlerExecutor(mResultHandler),
                        mExtAEResultListener);
                mCountCallbacksRemaining.set(1);
                long timeout = TIMEOUT_CALLBACK * 1000;
                waitForCallbacks(timeout);
                mExtensionSession.stopRepeating();
                // NOTE(review): Thread.sleep is static, so this sleeps the *current*
                // thread (intended here as a pipeline warm-up delay), not mResultThread.
                mResultThread.sleep(PIPELINE_WARMUP_TIME_MS);
            }

            ImageReader.OnImageAvailableListener readerListener =
                    createExtensionAvailableListener(mCaptureCallback);
            mOutputImageReaders[0].setOnImageAvailableListener(readerListener,
                    mSaveHandlers[0]);
            captureBuilder.addTarget(mOutputImageReaders[0].getSurface());
            mExtensionSession.capture(captureBuilder.build(), new HandlerExecutor(mResultHandler),
                    mExtCaptureResultListener);
            // Two callbacks: one for onCaptureResultAvailable and one for onImageAvailable
            mCountCallbacksRemaining.set(2);
            long timeout = TIMEOUT_CALLBACK * 1000;
            waitForCallbacks(timeout);

            if (mExtensionPreviewImageReader != null) {
                mExtensionPreviewImageReader.close();
                mExtensionPreviewImageReader = null;
            }

            // Close session and wait until session is fully closed
            mExtensionSession.close();
            sessionListener.getStateWaiter().waitForState(
                    BlockingExtensionSessionCallback.SESSION_CLOSED, TIMEOUT_SESSION_CLOSE);
        } catch
(android.hardware.camera2.CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        } catch (InterruptedException e) {
            throw new ItsException("Unexpected InterruptedException: ", e);
        }
    }

    // Performs one or more captures described by the JSON params, optionally
    // reusing an existing session and running a background repeating request.
    private void doCapture(JSONObject params) throws ItsException {
        boolean reuseSession = params.optBoolean("reuseSession", false);
        try {
            // Parse the JSON to get the list of capture requests.
            List<CaptureRequest.Builder> requests = ItsSerializer.deserializeRequestList(
                    mCamera, params, "captureRequests");

            // optional background preview requests
            List<CaptureRequest.Builder> backgroundRequests = ItsSerializer.deserializeRequestList(
                    mCamera, params, "repeatRequests");
            boolean backgroundRequest = backgroundRequests.size() > 0;
            // When the first surface is dedicated to 3A it is excluded from the
            // capture-surface count below.
            boolean firstSurfaceFor3A = params.optBoolean("firstSurfaceFor3A", false);
            int indexOffsetFor3A = firstSurfaceFor3A ? 1 : 0;

            int numSurfaces = 0;
            int numCaptureSurfaces = 0;
            try {
                // Reset all per-capture counters and RAW-mode flags before starting.
                mCountRawOrDng.set(0);
                mCountJpg.set(0);
                mCountYuv.set(0);
                mCountRaw10.set(0);
                mCountRaw12.set(0);
                mCountCapRes.set(0);
                mCountRaw10QuadBayer.set(0);
                mCountRaw10Stats.set(0);
                mCountRaw10QuadBayerStats.set(0);
                mCountRaw.set(0);
                mCountRawQuadBayer.set(0);
                mCountRawStats.set(0);
                mCountRawQuadBayerStats.set(0);

                mCaptureRawIsDng = false;
                mCaptureRawIsStats = false;
                mCaptureRawIsQuadBayer = false;
                mCaptureRawIsQuadBayerStats = false;
                mCaptureResults = new CaptureResult[requests.size()];

                JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
                boolean is10bitOutputPresent = false;

                // Image readers may already exist when the session is being reused.
                if (mOutputImageReaders == null) {
                    Logt.i(TAG, "Preparing image readers with output specs in doCapture");
                    is10bitOutputPresent = prepareImageReadersWithOutputSpecs(jsonOutputSpecs,
                            /*inputSize*/null, /*inputFormat*/0, /*maxInputBuffers*/0,
                            backgroundRequest, reuseSession);
                } else {
                    is10bitOutputPresent = mImageReaderArgs.getHas10bitOutput();
                }
                numSurfaces = mOutputImageReaders.length;
                // Surfaces used for actual captures exclude the background-request
                // surface (last) and the 3A surface (first), when present.
                numCaptureSurfaces = numSurfaces - (backgroundRequest ? 1 : 0)
                        - indexOffsetFor3A;
                if (numCaptureSurfaces <= 0) {
                    throw new ItsException(String.format(
                            "Invalid number of capture surfaces: numSurfaces %d, "
                            + "backgroundRequest %b, firstSurfaceFor3A: %b!", numSurfaces,
                            backgroundRequest, firstSurfaceFor3A));
                }

                List<OutputConfiguration> outputConfigs = getCaptureOutputConfigurations(
                        jsonOutputSpecs, is10bitOutputPresent);
                // Reuse the existing session only if its output configuration matches.
                if (mSession != null && reuseSession
                        && mCaptureOutputConfigs.equals(outputConfigs)) {
                    Logt.i(TAG, "Reusing camera capture session in doCapture()");
                } else {
                    Logt.i(TAG, "Need to create new capture session in doCapture()");
                    mSessionListener = new BlockingSessionCallback();
                    mCamera.createCaptureSessionByOutputConfigurations(
                            outputConfigs, mSessionListener, mCameraHandler);
                    mSession = mSessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);
                    mSessionListener.getStateWaiter().waitForState(
                            BlockingSessionCallback.SESSION_READY, TIMEOUT_SESSION_READY);
                    Logt.i(TAG, "New capture session created.");
                }
                mCaptureOutputConfigs = new ArrayList<OutputConfiguration>(outputConfigs);

                // The background surface (last index) gets a dropping listener.
                for (int i = indexOffsetFor3A; i < numSurfaces; i++) {
                    ImageReader.OnImageAvailableListener readerListener;
                    if (backgroundRequest && i == numSurfaces - 1) {
                        readerListener = createAvailableListenerDropper();
                    } else {
                        readerListener = createAvailableListener(mCaptureCallback);
                    }
                    mOutputImageReaders[i].setOnImageAvailableListener(readerListener,
                            mSaveHandlers[i]);
                }

                // Plan for how many callbacks need to be received throughout the duration of this
                // sequence of capture requests. There is one callback per image surface, and one
                // callback for the CaptureResult, for each capture.
                int numCaptures = requests.size();
                mCountCallbacksRemaining.set(numCaptures * (numCaptureSurfaces + 1));

            } catch (CameraAccessException e) {
                throw new ItsException("Error configuring outputs", e);
            } catch (org.json.JSONException e) {
                throw new ItsException("Error parsing params", e);
            }

            // Start background requests and let it warm up pipeline
            if (backgroundRequest) {
                List<CaptureRequest> bgRequestList =
                        new ArrayList<CaptureRequest>(backgroundRequests.size());
                for (int i = 0; i < backgroundRequests.size(); i++) {
                    CaptureRequest.Builder req = backgroundRequests.get(i);
                    req.addTarget(mOutputImageReaders[numCaptureSurfaces].getSurface());
                    bgRequestList.add(req.build());
                }
                mSession.setRepeatingBurst(bgRequestList, null, null);
                // warm up the pipeline
                Thread.sleep(PIPELINE_WARMUP_TIME_MS);
            }

            // Initiate the captures.
            long maxExpTimeNs = -1;
            List<CaptureRequest> requestList =
                    new ArrayList<>(requests.size());
            for (int i = 0; i < requests.size(); i++) {
                CaptureRequest.Builder req = requests.get(i);
                // For DNG captures, need the LSC map to be available.
                if (mCaptureRawIsDng) {
                    req.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, 1);
                }
                // Quad-Bayer captures require the sensor's maximum-resolution pixel mode.
                if (mCaptureRawIsQuadBayer || mCaptureRawIsQuadBayerStats) {
                    req.set(CaptureRequest.SENSOR_PIXEL_MODE,
                            CaptureRequest.SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
                }
                // Track the longest requested exposure to extend the wait timeout below.
                Long expTimeNs = req.get(CaptureRequest.SENSOR_EXPOSURE_TIME);
                if (expTimeNs != null && expTimeNs > maxExpTimeNs) {
                    maxExpTimeNs = expTimeNs;
                }

                for (int j = 0; j < numCaptureSurfaces; j++) {
                    req.addTarget(mOutputImageReaders[j + indexOffsetFor3A].getSurface());
                }
                requestList.add(req.build());
            }
            mSession.captureBurst(requestList, mCaptureResultListener, mResultHandler);

            long timeout = TIMEOUT_CALLBACK * 1000;
            if (maxExpTimeNs > 0) {
                timeout += maxExpTimeNs / 1000000; // ns to ms
            }
            // Make sure all callbacks have been hit (wait until captures are done).
            // If no timeouts are received after a timeout, then fail.
            waitForCallbacks(timeout);

            mSession.stopRepeating();
            mSessionListener.getStateWaiter().waitForState(
                    BlockingSessionCallback.SESSION_READY, TIMEOUT_SESSION_READY);
            Logt.i(TAG, "Session is ready again after doing capture.");

            // Close session and wait until session is fully closed, if desired.
            if (!reuseSession) {
                mSession.close();
                mSessionListener.getStateWaiter().waitForState(
                        BlockingSessionCallback.SESSION_CLOSED, TIMEOUT_SESSION_CLOSE);
                mSession = null;
                closeImageReaders();
            }
        } catch (android.hardware.camera2.CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        } catch (InterruptedException e) {
            throw new ItsException("Unexpected InterruptedException: ", e);
        }
    }

    /**
     * Perform reprocess captures.
     *
     * It takes captureRequests in a JSON object and performs capture requests in two steps:
     * regular capture requests to get reprocess input and reprocess capture requests to get
     * reprocess outputs.
     *
     * Regular capture requests:
     * 1. For each capture request in the JSON object, create a full-size capture request with
     *    the settings in the JSON object.
     * 2. Remember and clear noise reduction, edge enhancement, and effective exposure factor
     *    from the regular capture requests. (Those settings will be used for reprocess requests.)
     * 3. Submit the regular capture requests.
     *
     * Reprocess capture requests:
     * 4. Wait for the regular capture results and use them to create reprocess capture requests.
     * 5. Wait for the regular capture output images and queue them to the image writer.
     * 6. Set the noise reduction, edge enhancement, and effective exposure factor from #2.
     * 7. Submit the reprocess capture requests.
     *
     * The output images and results for the regular capture requests won't be written to socket.
     * The output images and results for the reprocess capture requests will be written to socket.
     */
    private void doReprocessCapture(JSONObject params) throws ItsException {
        ImageWriter imageWriter = null;
        // Per-request settings saved in step #2 of the javadoc, restored in step #6.
        ArrayList<Integer> noiseReductionModes = new ArrayList<>();
        ArrayList<Integer> edgeModes = new ArrayList<>();
        ArrayList<Float> effectiveExposureFactors = new ArrayList<>();

        // Reset all per-capture counters and RAW-mode flags before starting.
        mCountRawOrDng.set(0);
        mCountJpg.set(0);
        mCountYuv.set(0);
        mCountRaw10.set(0);
        mCountRaw12.set(0);
        mCountCapRes.set(0);
        mCountRaw10QuadBayer.set(0);
        mCountRaw10Stats.set(0);
        mCountRaw10QuadBayerStats.set(0);
        mCountRaw.set(0);
        mCountRawQuadBayer.set(0);
        mCountRawStats.set(0);
        mCountRawQuadBayerStats.set(0);

        mCaptureRawIsDng = false;
        mCaptureRawIsStats = false;
        mCaptureRawIsQuadBayer = false;
        mCaptureRawIsQuadBayerStats = false;

        try {
            // Parse the JSON to get the list of capture requests.
            List<CaptureRequest.Builder> inputRequests =
                    ItsSerializer.deserializeRequestList(mCamera, params, "captureRequests");

            // Prepare the image readers for reprocess input and reprocess outputs.
            int inputFormat = getReprocessInputFormat(params);
            Size inputSize = ItsUtils.getMaxOutputSize(mCameraCharacteristics, inputFormat);
            JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
            prepareImageReadersWithOutputSpecs(jsonOutputSpecs, inputSize, inputFormat,
                    inputRequests.size(), /*backgroundRequest*/false, /*reuseSession*/ false);

            // Prepare a reprocessable session.
            int numOutputSurfaces = mOutputImageReaders.length;
            InputConfiguration inputConfig = new InputConfiguration(inputSize.getWidth(),
                    inputSize.getHeight(), inputFormat);
            List<Surface> outputSurfaces = new ArrayList<Surface>();
            boolean addSurfaceForInput = true;
            for (int i = 0; i < numOutputSurfaces; i++) {
                outputSurfaces.add(mOutputImageReaders[i].getSurface());
                if (mOutputImageReaders[i] == mInputImageReader) {
                    // If input and one of the outputs share the same image reader, avoid
                    // adding the same surfaces twice.
                    addSurfaceForInput = false;
                }
            }

            if (addSurfaceForInput) {
                // Besides the output surfaces specified in JSON object, add an additional one
                // for reprocess input.
                outputSurfaces.add(mInputImageReader.getSurface());
            }

            BlockingSessionCallback sessionListener = new BlockingSessionCallback();
            mCamera.createReprocessableCaptureSession(inputConfig, outputSurfaces, sessionListener,
                    mCameraHandler);
            mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);

            // Create an image writer for reprocess input.
            Surface inputSurface = mSession.getInputSurface();
            imageWriter = ImageWriter.newInstance(inputSurface, inputRequests.size());

            // Set up input reader listener and capture callback listener to get
            // reprocess input buffers and the results in order to create reprocess capture
            // requests.
            ImageReaderListenerWaiter inputReaderListener = new ImageReaderListenerWaiter();
            mInputImageReader.setOnImageAvailableListener(inputReaderListener, mSaveHandlers[0]);

            CaptureCallbackWaiter captureCallbackWaiter = new CaptureCallbackWaiter();
            // Prepare the reprocess input request
            for (CaptureRequest.Builder inputRequest : inputRequests) {
                // Remember and clear noise reduction, edge enhancement, and effective exposure
                // factors.
                noiseReductionModes.add(inputRequest.get(CaptureRequest.NOISE_REDUCTION_MODE));
                edgeModes.add(inputRequest.get(CaptureRequest.EDGE_MODE));
                effectiveExposureFactors.add(inputRequest.get(
                        CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR));

                // ZSL modes on the input request; the saved values are applied to the
                // reprocess output requests later instead.
                inputRequest.set(CaptureRequest.NOISE_REDUCTION_MODE,
                        CaptureRequest.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG);
                inputRequest.set(CaptureRequest.EDGE_MODE,
                        CaptureRequest.EDGE_MODE_ZERO_SHUTTER_LAG);
                inputRequest.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, null);
                inputRequest.addTarget(mInputImageReader.getSurface());
                mSession.capture(inputRequest.build(), captureCallbackWaiter, mResultHandler);
            }

            // Wait for reprocess input images
            ArrayList<CaptureRequest.Builder> reprocessOutputRequests = new ArrayList<>();
            for (int i = 0; i < inputRequests.size(); i++) {
                TotalCaptureResult result =
                        captureCallbackWaiter.getResult(TIMEOUT_CALLBACK * 1000);
                reprocessOutputRequests.add(mCamera.createReprocessCaptureRequest(result));
                imageWriter.queueInputImage(inputReaderListener.getImage(TIMEOUT_CALLBACK * 1000));
            }

            // Start performing reprocess captures.

            mCaptureResults = new CaptureResult[inputRequests.size()];

            // Prepare reprocess capture requests.
            for (int i = 0; i < numOutputSurfaces; i++) {
                ImageReader.OnImageAvailableListener outputReaderListener =
                        createAvailableListener(mCaptureCallback);
                mOutputImageReaders[i].setOnImageAvailableListener(outputReaderListener,
                        mSaveHandlers[i]);
            }

            // Plan for how many callbacks need to be received throughout the duration of this
            // sequence of capture requests. There is one callback per image surface, and one
            // callback for the CaptureResult, for each capture.
            int numCaptures = reprocessOutputRequests.size();
            mCountCallbacksRemaining.set(numCaptures * (numOutputSurfaces + 1));

            // Initiate the captures.
            for (int i = 0; i < reprocessOutputRequests.size(); i++) {
                CaptureRequest.Builder req = reprocessOutputRequests.get(i);
                for (ImageReader outputImageReader : mOutputImageReaders) {
                    req.addTarget(outputImageReader.getSurface());
                }

                // Restore the settings remembered from the regular capture requests.
                req.set(CaptureRequest.NOISE_REDUCTION_MODE, noiseReductionModes.get(i));
                req.set(CaptureRequest.EDGE_MODE, edgeModes.get(i));
                req.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR,
                        effectiveExposureFactors.get(i));

                mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
            }

            // Make sure all callbacks have been hit (wait until captures are done).
            // If no timeouts are received after a timeout, then fail.
            waitForCallbacks(TIMEOUT_CALLBACK * 1000);
        } catch (android.hardware.camera2.CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        } finally {
            // Release readers, session, and writer regardless of outcome.
            closeImageReaders();
            if (mSession != null) {
                mSession.close();
                mSession = null;
            }
            if (imageWriter != null) {
                imageWriter.close();
            }
        }
    }

    @Override
    public final void onAccuracyChanged(Sensor sensor, int accuracy) {
        Logt.i(TAG, "Sensor " + sensor.getName() + " accuracy changed to " + accuracy);
    }

    @Override
    public final void onSensorChanged(SensorEvent event) {
        synchronized(mEventLock) {
            if (mEventsEnabled) {
                // Copy the event: SensorEvent objects are recycled by the framework,
                // so the values array must be snapshotted before buffering.
                MySensorEvent ev2 = new MySensorEvent();
                ev2.sensor = event.sensor;
                ev2.accuracy = event.accuracy;
                ev2.timestamp = event.timestamp;
                ev2.values = new float[event.values.length];
                System.arraycopy(event.values, 0, ev2.values, 0, event.values.length);
                mEvents.add(ev2);
            }
        }
    }

    /**
     * Computes the stats image from raw image byte array and sends the
stats image buffer.
     *
     * @param statsFormat The format of stats images.
     * @param cameraCharacteristics Camera characteristics object.
     * @param img Image byte array.
     * @param captureWidth The width of raw image.
     * @param captureHeight The height of raw image.
     * @param gridWidth The grid width.
     * @param gridHeight The grid height.
     * @param bufTag The tag of stats image buffer.
     * @throws ItsException If the stats image computation fails.
     * @throws InterruptedException If there is not enough quota available in the socket queue.
     */
    private void computeAndSendStatsImage(String statsFormat,
            CameraCharacteristics cameraCharacteristics, byte[] img, int captureWidth,
            int captureHeight, int gridWidth, int gridHeight, String bufTag)
            throws ItsException, InterruptedException {
        long startTimeMs = SystemClock.elapsedRealtime();
        // QuadBayer stats are computed against the maximum-resolution active array.
        boolean isMaximumResolution = statsFormat.contains("QuadBayer");
        Rect activeArrayCropRegion = ItsUtils.getActiveArrayCropRegion(
                cameraCharacteristics, isMaximumResolution);
        int aaw = activeArrayCropRegion.width();
        int aah = activeArrayCropRegion.height();
        int aax = activeArrayCropRegion.left;
        int aay = activeArrayCropRegion.top;
        float[] stats = StatsImage.computeStatsImage(img, statsFormat, captureWidth, captureHeight,
                aax, aay, aaw, aah, gridWidth, gridHeight);
        if (stats == null) {
            throw new ItsException(String.format(Locale.getDefault(),
                    "Stats image computation fails with format %s.", statsFormat));
        }
        long endTimeMs = SystemClock.elapsedRealtime();
        Logt.i(TAG, String.format(Locale.getDefault(),
                "%s computation takes %d ms.", statsFormat, endTimeMs - startTimeMs));
        // 4 bytes per float in the serialized stats buffer.
        int statsImgSize = stats.length * 4;
        // Swap the raw-image quota for the (smaller) stats-image quota.
        if (mSocketQueueQuota != null) {
            mSocketQueueQuota.release(img.length);
            mSocketQueueQuota.acquire(statsImgSize);
        }
        ByteBuffer bBuf =
ByteBuffer.allocate(statsImgSize);
        bBuf.order(ByteOrder.nativeOrder());
        FloatBuffer fBuf = bBuf.asFloatBuffer();
        fBuf.put(stats);
        fBuf.position(0);
        mSocketRunnableObj.sendResponseCaptureBuffer(bufTag, bBuf);
    }

    // Dispatches each completed capture image to the socket, keyed by format.
    private final CaptureCallback mCaptureCallback = new CaptureCallback() {
        @Override
        public void onCaptureAvailable(Image capture, String physicalCameraId) {
            // For physical-camera captures, switch to that camera's characteristics
            // so downstream stats/DNG processing uses the right metadata.
            if (physicalCameraId != null && !physicalCameraId.isEmpty()) {
                CameraCharacteristics physicalCameraCharacteristics = mPhysicalCameraChars.get(
                        physicalCameraId);
                if (physicalCameraCharacteristics != null) {
                    mCameraCharacteristics = physicalCameraCharacteristics;
                    Logt.i(TAG, String.format(Locale.getDefault(),
                            "Physical camera Id is non-empty, set mCameraCharacteristics to the "
                            + "characteristics of physical camera %s.",
                            physicalCameraId));
                }
            }

            try {
                int format = capture.getFormat();
                final int captureWidth = capture.getWidth();
                final int captureHeight = capture.getHeight();
                if (format == ImageFormat.JPEG) {
                    Logt.i(TAG, "Received JPEG capture");
                    byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
                    ByteBuffer buf = ByteBuffer.wrap(img);
                    mCountJpg.getAndIncrement();
                    mSocketRunnableObj.sendResponseCaptureBuffer("jpegImage" + physicalCameraId,
                            buf);
                } else if (format == ImageFormat.HEIC_ULTRAHDR) {
                    Logt.i(TAG, "Received HEIC_ULTRAHDR capture");
                    byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
                    ByteBuffer buf = ByteBuffer.wrap(img);
                    mCountJpg.getAndIncrement();
                    mSocketRunnableObj.sendResponseCaptureBuffer("heic_ultrahdrImage" +
                            physicalCameraId, buf);
                } else if (format == ImageFormat.JPEG_R) {
                    Logt.i(TAG, "Received JPEG/R capture");
                    byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
                    ByteBuffer buf = ByteBuffer.wrap(img);
                    mCountJpg.getAndIncrement();
                    mSocketRunnableObj.sendResponseCaptureBuffer("jpeg_rImage" + physicalCameraId,
                            buf);
                } else if (format == ImageFormat.PRIVATE) {
                    Logt.i(TAG, "Received PRIVATE capture");
                    // Private images have client opaque buffers
                    mSocketRunnableObj.sendResponseCaptureBuffer("privImage" + physicalCameraId,
                            null);
                } else if (format == ImageFormat.YUV_420_888) {
                    Logt.i(TAG, "Received YUV capture");
                    byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
                    ByteBuffer buf = ByteBuffer.wrap(img);
                    mSocketRunnableObj.sendResponseCaptureBuffer(
                            "yuvImage" + physicalCameraId, buf);
                } else if (format == ImageFormat.RAW10) {
                    Logt.i(TAG, "Received RAW10 capture.");
                    byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
                    // RAW10 is routed by mode: stats, quad-Bayer stats, quad-Bayer, or plain.
                    if (mCaptureRawIsStats) {
                        String statsFormat = StatsFormat.RAW10_STATS.getValue();
                        String bufTag = "raw10StatsImage" + physicalCameraId;
                        computeAndSendStatsImage(statsFormat, mCameraCharacteristics, img,
                                captureWidth, captureHeight, mCaptureStatsGridWidth,
                                mCaptureStatsGridHeight, bufTag);
                        mCountRaw10Stats.getAndIncrement();
                    } else if (mCaptureRawIsQuadBayerStats) {
                        String statsFormat = StatsFormat.RAW10_QUAD_BAYER_STATS.getValue();
                        String bufTag = "raw10QuadBayerStatsImage" + physicalCameraId;
                        computeAndSendStatsImage(statsFormat, mCameraCharacteristics, img,
                                captureWidth, captureHeight, mCaptureStatsGridWidth,
                                mCaptureStatsGridHeight, bufTag);
                        mCountRaw10QuadBayerStats.getAndIncrement();
                    } else if (mCaptureRawIsQuadBayer) {
                        ByteBuffer buf = ByteBuffer.wrap(img);
                        mSocketRunnableObj.sendResponseCaptureBuffer(
                                "raw10QuadBayerImage" + physicalCameraId, buf);
                        mCountRaw10QuadBayer.getAndIncrement();
                    } else {
                        ByteBuffer buf = ByteBuffer.wrap(img);
                        mSocketRunnableObj.sendResponseCaptureBuffer(
                                "raw10Image" + physicalCameraId, buf);
                    }
                } else if (format == ImageFormat.RAW12) {
                    Logt.i(TAG, "Received RAW12 capture");
                    byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
                    ByteBuffer buf = ByteBuffer.wrap(img);
                    mCountRaw12.getAndIncrement();
                    mSocketRunnableObj.sendResponseCaptureBuffer("raw12Image" + physicalCameraId,
                            buf);
                } else if (format == ImageFormat.RAW_SENSOR) {
                    Logt.i(TAG, "Received RAW16 capture");
                    int count = mCountRawOrDng.getAndIncrement();
                    if (!mCaptureRawIsDng) {
                        byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
                        // RAW16 is routed by mode: stats, quad-Bayer stats, quad-Bayer, or plain.
                        if (mCaptureRawIsStats) {
                            String statsFormat = StatsFormat.RAW16_STATS.getValue();
                            String bufTag = "rawStatsImage" + physicalCameraId;
                            computeAndSendStatsImage(statsFormat, mCameraCharacteristics, img,
                                    captureWidth, captureHeight, mCaptureStatsGridWidth,
                                    mCaptureStatsGridHeight, bufTag);
                            mCountRawStats.getAndIncrement();
                        } else if (mCaptureRawIsQuadBayerStats) {
                            String statsFormat = StatsFormat.RAW16_QUAD_BAYER_STATS.getValue();
                            String bufTag = "rawQuadBayerStatsImage" + physicalCameraId;
                            computeAndSendStatsImage(statsFormat, mCameraCharacteristics, img,
                                    captureWidth, captureHeight, mCaptureStatsGridWidth,
                                    mCaptureStatsGridHeight, bufTag);
                            mCountRawQuadBayerStats.getAndIncrement();
                        } else if (mCaptureRawIsQuadBayer) {
                            ByteBuffer buf = ByteBuffer.wrap(img);
                            mSocketRunnableObj.sendResponseCaptureBuffer(
                                    "rawQuadBayerImage" + physicalCameraId, buf);
                            mCountRawQuadBayer.getAndIncrement();
                        } else {
                            ByteBuffer buf = ByteBuffer.wrap(img);
                            mSocketRunnableObj.sendResponseCaptureBuffer(
                                    "rawImage" + physicalCameraId, buf);
                            mCountRaw.getAndIncrement();
                        }
                    } else {
                        // Wait until the corresponding capture result is ready, up to a timeout.
                        long t0 = android.os.SystemClock.elapsedRealtime();
                        while (!mThreadExitFlag
                                && android.os.SystemClock.elapsedRealtime() - t0 < TIMEOUT_CAP_RES) {
                            if (mCaptureResults[count] != null) {
                                Logt.i(TAG, "Writing capture as DNG");
                                DngCreator dngCreator = new DngCreator(
                                        mCameraCharacteristics, mCaptureResults[count]);
                                ByteArrayOutputStream dngStream = new ByteArrayOutputStream();
                                dngCreator.writeImage(dngStream, capture);
                                byte[] dngArray = dngStream.toByteArray();
                                if (mSocketQueueQuota != null) {
                                    // Ideally we should acquire before allocating memory, but
                                    // here the DNG size is unknown before toByteArray call, so
                                    // we have to register the size afterward. This should still
                                    // work most of the time since all DNG images are handled by
                                    // the same handler thread, so we are at most one buffer over
                                    // the quota.
                                    mSocketQueueQuota.acquire(dngArray.length);
                                }
                                ByteBuffer dngBuf = ByteBuffer.wrap(dngArray);
                                mSocketRunnableObj.sendResponseCaptureBuffer("dngImage", dngBuf);
                                break;
                            } else {
                                // Result not ready yet; poll again shortly.
                                Thread.sleep(1);
                            }
                        }
                    }
                } else if (format == ImageFormat.Y8) {
                    Logt.i(TAG, "Received Y8 capture");
                    byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
                    ByteBuffer buf = ByteBuffer.wrap(img);
                    mSocketRunnableObj.sendResponseCaptureBuffer(
                            "y8Image" + physicalCameraId, buf);
                } else {
                    throw new ItsException("Unsupported image format: " + format);
                }

                // Account for this image and wake any thread in waitForCallbacks().
                synchronized (mCountCallbacksRemaining) {
                    mCountCallbacksRemaining.decrementAndGet();
                    mCountCallbacksRemaining.notify();
                }
            } catch (IOException | InterruptedException | ItsException e) {
                Logt.e(TAG, "Script error: ", e);
            }
        }
    };

    // Converts a Rational to its float value.
    private static float r2f(Rational r) {
        return (float)r.getNumerator() / (float)r.getDenominator();
    }

    private boolean hasCapability(int capability) throws
ItsException { 4701 return hasCapability(capability, false, -1 /* cameraExtensionMode */); 4702 } 4703 4704 /** 4705 * Returns true if the camera has the given capability. 4706 * 4707 * @param capability The capability to check. 4708 * @param isCameraExtension Whether the capability is for a camera extension session. 4709 * @param cameraExtensionMode The mode of the camera extension. Only used if 4710 * isCameraExtension is true. Use -1 for non-camera extension sessions. 4711 * @return True if the camera has the given capability. 4712 */ 4713 private boolean hasCapability(int capability, boolean isCameraExtension, 4714 int cameraExtensionMode) throws ItsException { 4715 final int[] capabilities; 4716 if (isCameraExtension) { 4717 if (!ItsUtils.isAtLeastV()) { 4718 return false; 4719 } 4720 capabilities = mCameraExtensionCharacteristics.get(cameraExtensionMode, 4721 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 4722 if (capabilities == null) { 4723 return false; 4724 } 4725 } else { 4726 capabilities = mCameraCharacteristics.get( 4727 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 4728 if (capabilities == null) { 4729 throw new ItsException("Failed to get capabilities"); 4730 } 4731 } 4732 for (int c : capabilities) { 4733 if (c == capability) { 4734 return true; 4735 } 4736 } 4737 return false; 4738 } 4739 4740 private String buildLogString(CaptureResult result) throws ItsException { 4741 return buildLogString(result, false, -1 /* cameraExtensionMode */); 4742 } 4743 4744 /** 4745 * Returns a string representation of the given capture result. 4746 * 4747 * @param result The capture result to log. 4748 * @param isCameraExtension Whether the capture result is for a camera extension session. 4749 * @param cameraExtensionMode The mode of the camera extension. Only used if 4750 * isCameraExtension is true. Use -1 for non-camera extension sessions. 4751 * @return A string representation of the capture result. 
4752 */ 4753 private String buildLogString(CaptureResult result, 4754 boolean isCameraExtension, int cameraExtensionMode) throws ItsException { 4755 StringBuilder logMsg = new StringBuilder(); 4756 logMsg.append(String.format( 4757 "Capt result: AE=%d, AF=%d, AWB=%d, ", 4758 result.get(CaptureResult.CONTROL_AE_STATE), 4759 result.get(CaptureResult.CONTROL_AF_STATE), 4760 result.get(CaptureResult.CONTROL_AWB_STATE))); 4761 4762 boolean readSensorSettings = hasCapability( 4763 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS, 4764 isCameraExtension, 4765 cameraExtensionMode); 4766 4767 if (readSensorSettings) { 4768 logMsg.append(String.format( 4769 "sens=%d, exp=%.1fms, dur=%.1fms, ", 4770 result.get(CaptureResult.SENSOR_SENSITIVITY), 4771 result.get(CaptureResult.SENSOR_EXPOSURE_TIME).longValue() / 1000000.0f, 4772 result.get(CaptureResult.SENSOR_FRAME_DURATION).longValue() / 4773 1000000.0f)); 4774 } 4775 if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null) { 4776 logMsg.append(String.format( 4777 "gains=[%.1f, %.1f, %.1f, %.1f], ", 4778 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getRed(), 4779 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenEven(), 4780 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenOdd(), 4781 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getBlue())); 4782 } else { 4783 logMsg.append("gains=[], "); 4784 } 4785 if (result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) { 4786 logMsg.append(String.format( 4787 "xform=[%.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f], ", 4788 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,0)), 4789 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,0)), 4790 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,0)), 4791 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,1)), 4792 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,1)), 4793 
r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,1)), 4794 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,2)), 4795 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,2)), 4796 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,2)))); 4797 } else { 4798 logMsg.append("xform=[], "); 4799 } 4800 logMsg.append(String.format( 4801 Locale.getDefault(), 4802 "foc=%.1f, ", 4803 result.get(CaptureResult.LENS_FOCUS_DISTANCE))); 4804 logMsg.append(String.format( 4805 Locale.getDefault(), 4806 "zoom=%.1f, ", 4807 result.get(CaptureResult.CONTROL_ZOOM_RATIO))); 4808 logMsg.append(String.format( 4809 Locale.getDefault(), 4810 "timestamp=%d", 4811 result.get(CaptureResult.SENSOR_TIMESTAMP))); 4812 return logMsg.toString(); 4813 } 4814 4815 private class ThreeAResultHandler { 4816 private volatile boolean stopped = false; 4817 private boolean aeResultSent = false; 4818 private boolean awbResultSent = false; 4819 private boolean afResultSent = false; 4820 private CameraCharacteristics c = mCameraCharacteristics; 4821 private boolean isFixedFocusLens = ItsUtils.isFixedFocusLens(c); 4822 4823 void handleCaptureResult(CaptureRequest request, TotalCaptureResult result) 4824 throws ItsException { 4825 if (stopped) { 4826 return; 4827 } 4828 4829 if (request == null || result == null) { 4830 throw new ItsException("Request/result is invalid"); 4831 } 4832 4833 Logt.i(TAG, "TotalCaptureResult: " + buildLogString(result)); 4834 4835 synchronized (m3AStateLock) { 4836 if (result.get(CaptureResult.CONTROL_AE_STATE) != null) { 4837 mConvergedAE = result.get(CaptureResult.CONTROL_AE_STATE) 4838 == CaptureResult.CONTROL_AE_STATE_CONVERGED 4839 || result.get(CaptureResult.CONTROL_AE_STATE) 4840 == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED 4841 || result.get(CaptureResult.CONTROL_AE_STATE) 4842 == CaptureResult.CONTROL_AE_STATE_LOCKED; 4843 mLockedAE = result.get(CaptureResult.CONTROL_AE_STATE) 4844 == 
CaptureResult.CONTROL_AE_STATE_LOCKED; 4845 if (!mPrecaptureTriggered) { 4846 mPrecaptureTriggered = result.get(CaptureResult.CONTROL_AE_STATE) 4847 == CaptureResult.CONTROL_AE_STATE_PRECAPTURE; 4848 } 4849 if (!mConvergeAETriggered) { 4850 mConvergeAETriggered = mConvergedAE; 4851 } 4852 } 4853 if (result.get(CaptureResult.CONTROL_AF_STATE) != null) { 4854 mConvergedAF = result.get(CaptureResult.CONTROL_AF_STATE) 4855 == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED; 4856 } 4857 if (result.get(CaptureResult.CONTROL_AWB_STATE) != null) { 4858 mConvergedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) 4859 == CaptureResult.CONTROL_AWB_STATE_CONVERGED 4860 || result.get(CaptureResult.CONTROL_AWB_STATE) 4861 == CaptureResult.CONTROL_AWB_STATE_LOCKED; 4862 mLockedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) 4863 == CaptureResult.CONTROL_AWB_STATE_LOCKED; 4864 } 4865 4866 if ((mConvergedAE || !mDoAE) && mConvergedAWB 4867 && (!mDoAF || isFixedFocusLens || mConvergedAF)) { 4868 if (mSend3AResults && (!mNeedsLockedAE || mLockedAE) && !aeResultSent) { 4869 aeResultSent = true; 4870 if (result.get(CaptureResult.SENSOR_SENSITIVITY) != null 4871 && result.get(CaptureResult.SENSOR_EXPOSURE_TIME) != null) { 4872 mSocketRunnableObj.sendResponse("aeResult", 4873 String.format(Locale.getDefault(), "%d %d", 4874 result.get(CaptureResult.SENSOR_SENSITIVITY).intValue(), 4875 result.get(CaptureResult.SENSOR_EXPOSURE_TIME).intValue() 4876 )); 4877 } else { 4878 Logt.i(TAG, String.format( 4879 "AE converged but NULL exposure values, sensitivity:%b," 4880 + " expTime:%b", 4881 result.get(CaptureResult.SENSOR_SENSITIVITY) == null, 4882 result.get(CaptureResult.SENSOR_EXPOSURE_TIME) == null)); 4883 } 4884 } 4885 if (mSend3AResults && !afResultSent) { 4886 afResultSent = true; 4887 if (result.get(CaptureResult.LENS_FOCUS_DISTANCE) != null) { 4888 mSocketRunnableObj.sendResponse("afResult", String.format( 4889 Locale.getDefault(), "%f", 4890 
result.get(CaptureResult.LENS_FOCUS_DISTANCE) 4891 )); 4892 } else { 4893 Logt.i(TAG, "AF converged but NULL focus distance values"); 4894 } 4895 } 4896 if (mSend3AResults && (!mNeedsLockedAWB || mLockedAWB) && !awbResultSent) { 4897 awbResultSent = true; 4898 if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null 4899 && result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) { 4900 mSocketRunnableObj.sendResponse("awbResult", String.format( 4901 Locale.getDefault(), 4902 "%f %f %f %f %f %f %f %f %f %f %f %f %f", 4903 result.get(CaptureResult.COLOR_CORRECTION_GAINS) 4904 .getRed(), 4905 result.get(CaptureResult.COLOR_CORRECTION_GAINS) 4906 .getGreenEven(), 4907 result.get(CaptureResult.COLOR_CORRECTION_GAINS) 4908 .getGreenOdd(), 4909 result.get(CaptureResult.COLOR_CORRECTION_GAINS) 4910 .getBlue(), 4911 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) 4912 .getElement(0, 0)), 4913 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) 4914 .getElement(1, 0)), 4915 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) 4916 .getElement(2, 0)), 4917 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) 4918 .getElement(0, 1)), 4919 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) 4920 .getElement(1, 1)), 4921 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) 4922 .getElement(2, 1)), 4923 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) 4924 .getElement(0, 2)), 4925 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) 4926 .getElement(1, 2)), 4927 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) 4928 .getElement(2, 2)))); 4929 } else { 4930 Logt.i(TAG, String.format( 4931 "AWB converged but NULL color correction values, gains:%b," 4932 + " ccm:%b", 4933 result.get(CaptureResult.COLOR_CORRECTION_GAINS) == null, 4934 result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) 4935 == null)); 4936 } 4937 } 4938 } 4939 } 4940 4941 mInterlock3A.open(); 4942 } 4943 4944 void stop() { 4945 stopped = true; 4946 } 4947 } 
4948 4949 private class ThreeAResultListener extends CaptureResultListener { 4950 private ThreeAResultHandler mThreeAResultHandler = new ThreeAResultHandler(); 4951 4952 @Override 4953 public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, 4954 long timestamp, long frameNumber) { 4955 } 4956 4957 @Override 4958 public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, 4959 TotalCaptureResult result) { 4960 try { 4961 mThreeAResultHandler.handleCaptureResult(request, result); 4962 } catch (ItsException e) { 4963 Logt.e(TAG, "Script error: ", e); 4964 } catch (Exception e) { 4965 Logt.e(TAG, "Script error: ", e); 4966 } 4967 } 4968 4969 @Override 4970 public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, 4971 CaptureFailure failure) { 4972 Logt.e(TAG, "Script error: capture failed"); 4973 } 4974 4975 public void stop() { 4976 mThreeAResultHandler.stop(); 4977 } 4978 } 4979 4980 private class ExtensionsThreeAResultListener extends ExtensionCaptureResultListener { 4981 private ThreeAResultHandler mThreeAResultHandler = new ThreeAResultHandler(); 4982 @Override 4983 public void onCaptureStarted(CameraExtensionSession session, CaptureRequest request, 4984 long timestamp) { 4985 } 4986 4987 @Override 4988 public void onCaptureResultAvailable(CameraExtensionSession session, CaptureRequest request, 4989 TotalCaptureResult result) { 4990 try { 4991 mThreeAResultHandler.handleCaptureResult(request, result); 4992 } catch (ItsException e) { 4993 Logt.e(TAG, "Script error: ", e); 4994 } catch (Exception e) { 4995 Logt.e(TAG, "Script error: ", e); 4996 } 4997 } 4998 4999 @Override 5000 public void onCaptureFailed(CameraExtensionSession session, CaptureRequest request) { 5001 Logt.e(TAG, "Script error: capture failed"); 5002 } 5003 5004 public void stop() { 5005 mThreeAResultHandler.stop(); 5006 } 5007 } 5008 5009 private class PreviewFrameCaptureResultListener extends CaptureResultListener { 5010 
private CountDownLatch mFrameCaptureLatch; 5011 private CaptureRequest mCaptureRequest = null; 5012 private TotalCaptureResult mCaptureResult = null; 5013 5014 PreviewFrameCaptureResultListener(CountDownLatch frameCaptureLatch) { 5015 mFrameCaptureLatch = frameCaptureLatch; 5016 } 5017 5018 @Override 5019 public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, 5020 long timestamp, long frameNumber) { 5021 Log.d(TAG, "PreviewFrameCaptureResultListener [onCaptureStarted]"); 5022 } 5023 5024 @Override 5025 public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, 5026 TotalCaptureResult result) { 5027 Log.d(TAG, "PreviewFrameCaptureResultListener [onCaptureCompleted]"); 5028 try { 5029 if (request == null || result == null) { 5030 throw new ItsException("Request/Result is invalid"); 5031 } 5032 Logt.i(TAG, buildLogString(result)); 5033 mCaptureRequest = request; 5034 mCaptureResult = result; 5035 mFrameCaptureLatch.countDown(); 5036 } catch (ItsException e) { 5037 throw new ItsRuntimeException("Error handling capture result", e); 5038 } 5039 } 5040 5041 @Override 5042 public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, 5043 CaptureFailure failure) { 5044 Logt.e(TAG, "Script error: capture failed"); 5045 } 5046 5047 } 5048 5049 private class ExtensionPreviewFrameCaptureResultListener 5050 extends ExtensionCaptureResultListener { 5051 private CountDownLatch mFrameCaptureLatch; 5052 private CaptureRequest mCaptureRequest = null; 5053 private TotalCaptureResult mCaptureResult = null; 5054 private final int mCameraExtensionMode; 5055 5056 ExtensionPreviewFrameCaptureResultListener(CountDownLatch frameCaptureLatch, 5057 int cameraExtensionMode) { 5058 mFrameCaptureLatch = frameCaptureLatch; 5059 mCameraExtensionMode = cameraExtensionMode; 5060 } 5061 5062 @Override 5063 public void onCaptureStarted(CameraExtensionSession session, CaptureRequest request, 5064 long timestamp) { 5065 try { 5066 
if (request == null) { 5067 throw new ItsException("Request is invalid"); 5068 } 5069 mFrameCaptureLatch.countDown(); 5070 } catch (ItsException e) { 5071 Logt.e(TAG, "Script error: ", e); 5072 } 5073 5074 } 5075 5076 @Override 5077 public void onCaptureResultAvailable(CameraExtensionSession session, CaptureRequest request, 5078 TotalCaptureResult result) { 5079 try { 5080 if (request == null || result == null) { 5081 throw new ItsException("Request/result is invalid"); 5082 } 5083 mCaptureRequest = request; 5084 mCaptureResult = result; 5085 Logt.i(TAG, buildLogString(result, true, mCameraExtensionMode)); 5086 } catch (ItsException e) { 5087 Logt.e(TAG, "Script error: ", e); 5088 } 5089 } 5090 5091 @Override 5092 public void onCaptureFailed(CameraExtensionSession session, CaptureRequest request) { 5093 Logt.e(TAG, "Script error: capture failed"); 5094 } 5095 } 5096 5097 class RecordingResultListener extends CaptureResultListener { 5098 private Map<Long, RecordingResult> mTimestampToCaptureResultsMap = 5099 new ConcurrentHashMap<>(); 5100 5101 /** 5102 * Time to wait for autofocus to converge. 5103 */ 5104 private static final long PREVIEW_AUTOFOCUS_TIMEOUT_MS = 1000; 5105 5106 /** 5107 * {@link ConditionVariable} to open when autofocus has converged. 
5108 */ 5109 private ConditionVariable mAfConverged = new ConditionVariable(); 5110 5111 @Override 5112 public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, 5113 long timestamp, long frameNumber) { 5114 } 5115 5116 @Override 5117 public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, 5118 TotalCaptureResult result) { 5119 try { 5120 if (request == null || result == null) { 5121 throw new ItsException("Request/Result is invalid"); 5122 } 5123 5124 Logt.i(TAG, buildLogString(result)); 5125 5126 RecordingResult partialResult = new RecordingResult(); 5127 int afState = result.get(CaptureResult.CONTROL_AF_STATE); 5128 Logt.i(TAG, "TotalCaptureResult # " + mTimestampToCaptureResultsMap.size() 5129 + " timestamp = " + result.get(CaptureResult.SENSOR_TIMESTAMP) 5130 + " z = " + result.get(CaptureResult.CONTROL_ZOOM_RATIO) 5131 + " fl = " + result.get(CaptureResult.LENS_FOCAL_LENGTH) 5132 + " phyid = " 5133 + result.get(CaptureResult.LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID) 5134 + " AE_STATE = " + result.get(CaptureResult.CONTROL_AE_STATE) 5135 + " AF_STATE = " + afState 5136 + " AWB_STATE = " + result.get(CaptureResult.CONTROL_AWB_STATE)); 5137 long timestamp = result.get(CaptureResult.SENSOR_TIMESTAMP); 5138 partialResult.addKeys(result, RecordingResult.PREVIEW_RESULT_TRACKED_KEYS); 5139 if (ItsUtils.isAtLeastV()) { 5140 partialResult.addVKeys(result); 5141 } 5142 mTimestampToCaptureResultsMap.put(timestamp, partialResult); 5143 5144 if (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED || 5145 afState == CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED) { 5146 mAfConverged.open(); 5147 } 5148 } catch (ItsException e) { 5149 throw new ItsRuntimeException("Error handling capture result", e); 5150 } 5151 } 5152 5153 @Override 5154 public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, 5155 CaptureFailure failure) { 5156 Logt.e(TAG, "Script error: capture failed"); 5157 } 5158 5159 
/** 5160 * Returns up-to-date value of recording capture results for calling thread. 5161 */ 5162 public Map<Long, RecordingResult> getCaptureResultsMap() { 5163 return mTimestampToCaptureResultsMap; 5164 } 5165 5166 /** 5167 * Blocks until the next {@link CaptureResult} that shows AF convergence. 5168 */ 5169 public boolean waitForAfConvergence() throws InterruptedException { 5170 mAfConverged.close(); 5171 return mAfConverged.block(PREVIEW_AUTOFOCUS_TIMEOUT_MS); 5172 } 5173 } 5174 5175 private final CaptureResultListener mCaptureResultListener = new CaptureResultListener() { 5176 @Override 5177 public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, 5178 long timestamp, long frameNumber) { 5179 } 5180 5181 @Override 5182 public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, 5183 TotalCaptureResult result) { 5184 try { 5185 if (request == null || result == null) { 5186 throw new ItsException("Request/result is invalid"); 5187 } 5188 5189 Logt.i(TAG, buildLogString(result)); 5190 5191 int count = mCountCapRes.getAndIncrement(); 5192 mCaptureResults[count] = result; 5193 mSocketRunnableObj.sendResponseCaptureResult(request, result, mOutputImageReaders); 5194 synchronized(mCountCallbacksRemaining) { 5195 mCountCallbacksRemaining.decrementAndGet(); 5196 mCountCallbacksRemaining.notify(); 5197 } 5198 } catch (ItsException e) { 5199 Logt.e(TAG, "Script error: ", e); 5200 } catch (Exception e) { 5201 Logt.e(TAG, "Script error: ", e); 5202 } 5203 } 5204 5205 @Override 5206 public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, 5207 CaptureFailure failure) { 5208 Logt.e(TAG, "Script error: capture failed"); 5209 } 5210 }; 5211 5212 private final ExtensionCaptureResultListener mExtCaptureResultListener = 5213 new ExtensionCaptureResultListener() { 5214 @Override 5215 public void onCaptureProcessStarted(CameraExtensionSession session, 5216 CaptureRequest request) { 5217 } 5218 5219 @Override 
5220 public void onCaptureResultAvailable(CameraExtensionSession session, CaptureRequest request, 5221 TotalCaptureResult result) { 5222 try { 5223 if (request == null || result == null) { 5224 throw new ItsException("Request/result is invalid"); 5225 } 5226 5227 Logt.i(TAG, buildLogString(result)); 5228 5229 int count = mCountCapRes.getAndIncrement(); 5230 mCaptureResults[count] = result; 5231 mSocketRunnableObj.sendResponseCaptureResult(request, result, mOutputImageReaders); 5232 synchronized(mCountCallbacksRemaining) { 5233 mCountCallbacksRemaining.decrementAndGet(); 5234 mCountCallbacksRemaining.notify(); 5235 } 5236 } catch (ItsException e) { 5237 Logt.e(TAG, "Script error: ", e); 5238 } catch (Exception e) { 5239 Logt.e(TAG, "Script error: ", e); 5240 } 5241 } 5242 5243 @Override 5244 public void onCaptureFailed(CameraExtensionSession session, CaptureRequest request) { 5245 Logt.e(TAG, "Script error: capture failed"); 5246 } 5247 }; 5248 5249 private final ExtensionCaptureResultListener mExtAEResultListener = 5250 new ExtensionCaptureResultListener() { 5251 @Override 5252 public void onCaptureProcessStarted(CameraExtensionSession session, 5253 CaptureRequest request) { 5254 } 5255 5256 @Override 5257 public void onCaptureResultAvailable(CameraExtensionSession session, 5258 CaptureRequest request, 5259 TotalCaptureResult result) { 5260 try { 5261 if (request == null || result == null) { 5262 throw new ItsException("Request/result is invalid"); 5263 } 5264 int aeState = result.get(CaptureResult.CONTROL_AE_STATE); 5265 if (aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED 5266 || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED) { 5267 synchronized (mCountCallbacksRemaining) { 5268 mCountCallbacksRemaining.decrementAndGet(); 5269 mCountCallbacksRemaining.notify(); 5270 } 5271 } 5272 } catch (ItsException e) { 5273 Logt.e(TAG, "Script error: ", e); 5274 } catch (Exception e) { 5275 Logt.e(TAG, "Script error: ", e); 5276 } 5277 } 5278 5279 @Override 5280 
public void onCaptureFailed(CameraExtensionSession session, 5281 CaptureRequest request) { 5282 Logt.e(TAG, "Script error: capture failed"); 5283 } 5284 }; 5285 5286 private static class CaptureCallbackWaiter extends CameraCaptureSession.CaptureCallback { 5287 private final LinkedBlockingQueue<TotalCaptureResult> mResultQueue = 5288 new LinkedBlockingQueue<>(); 5289 5290 @Override 5291 public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, 5292 long timestamp, long frameNumber) { 5293 } 5294 5295 @Override 5296 public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, 5297 TotalCaptureResult result) { 5298 try { 5299 mResultQueue.put(result); 5300 } catch (InterruptedException e) { 5301 throw new UnsupportedOperationException( 5302 "Can't handle InterruptedException in onImageAvailable"); 5303 } 5304 } 5305 5306 @Override 5307 public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, 5308 CaptureFailure failure) { 5309 Logt.e(TAG, "Script error: capture failed"); 5310 } 5311 5312 public TotalCaptureResult getResult(long timeoutMs) throws ItsException { 5313 TotalCaptureResult result; 5314 try { 5315 result = mResultQueue.poll(timeoutMs, TimeUnit.MILLISECONDS); 5316 } catch (InterruptedException e) { 5317 throw new ItsException(e); 5318 } 5319 5320 if (result == null) { 5321 throw new ItsException("Getting an image timed out after " + timeoutMs + 5322 "ms"); 5323 } 5324 5325 return result; 5326 } 5327 } 5328 5329 private static class ImageReaderListenerWaiter implements ImageReader.OnImageAvailableListener { 5330 private final LinkedBlockingQueue<Image> mImageQueue = new LinkedBlockingQueue<>(); 5331 5332 @Override 5333 public void onImageAvailable(ImageReader reader) { 5334 try { 5335 mImageQueue.put(reader.acquireNextImage()); 5336 } catch (InterruptedException e) { 5337 throw new UnsupportedOperationException( 5338 "Can't handle InterruptedException in onImageAvailable"); 5339 } 5340 } 
5341 5342 public Image getImage(long timeoutMs) throws ItsException { 5343 Image image; 5344 try { 5345 image = mImageQueue.poll(timeoutMs, TimeUnit.MILLISECONDS); 5346 } catch (InterruptedException e) { 5347 throw new ItsException(e); 5348 } 5349 5350 if (image == null) { 5351 throw new ItsException("Getting an image timed out after " + timeoutMs + 5352 "ms"); 5353 } 5354 return image; 5355 } 5356 } 5357 5358 private int getReprocessInputFormat(JSONObject params) throws ItsException { 5359 String reprocessFormat; 5360 try { 5361 reprocessFormat = params.getString("reprocessFormat"); 5362 } catch (org.json.JSONException e) { 5363 throw new ItsException("Error parsing reprocess format: " + e); 5364 } 5365 5366 if (reprocessFormat.equals("yuv")) { 5367 return ImageFormat.YUV_420_888; 5368 } else if (reprocessFormat.equals("private")) { 5369 return ImageFormat.PRIVATE; 5370 } 5371 5372 throw new ItsException("Unknown reprocess format: " + reprocessFormat); 5373 } 5374 5375 private void validateCameraId(String cameraId) throws ItsException { 5376 if (mItsCameraIdList == null) { 5377 mItsCameraIdList = ItsUtils.getItsCompatibleCameraIds(mCameraManager); 5378 } 5379 if (mItsCameraIdList.mCameraIds.size() == 0) { 5380 throw new ItsException("No camera devices"); 5381 } 5382 if (!mItsCameraIdList.mCameraIds.contains(cameraId)) { 5383 throw new ItsException("Invalid cameraId " + cameraId); 5384 } 5385 } 5386 5387 private boolean isHlg10Compatible(int format) { 5388 return (format == ImageFormat.PRIVATE 5389 || format == ImageFormat.JPEG_R 5390 || format == ImageFormat.HEIC_ULTRAHDR 5391 || format == ImageFormat.YCBCR_P010); 5392 } 5393 5394 private long getEncoderTimestampOffset() { 5395 int timestampSource = mCameraCharacteristics.get( 5396 CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE); 5397 long encoderTimestampOffset = 0; 5398 if (timestampSource == CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) { 5399 long uptimeNanos = 
TimeUnit.MILLISECONDS.toNanos(SystemClock.uptimeMillis()); 5400 encoderTimestampOffset = uptimeNanos - SystemClock.elapsedRealtimeNanos(); 5401 } 5402 return encoderTimestampOffset; 5403 } 5404 } 5405