1 /* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package com.android.cts.verifier.camera.its; 18 19 import static android.media.MediaCodecInfo.CodecProfileLevel.HEVCProfileMain10; 20 21 import android.graphics.Bitmap; 22 import android.graphics.BitmapFactory; 23 import android.graphics.ImageFormat; 24 import android.graphics.Rect; 25 import android.hardware.camera2.CameraAccessException; 26 import android.hardware.camera2.CameraCharacteristics; 27 import android.hardware.camera2.CameraDevice; 28 import android.hardware.camera2.CameraManager; 29 import android.hardware.camera2.CameraMetadata; 30 import android.hardware.camera2.CaptureRequest; 31 import android.hardware.camera2.params.MeteringRectangle; 32 import android.hardware.camera2.params.StreamConfigurationMap; 33 import android.media.CamcorderProfile; 34 import android.media.EncoderProfiles; 35 import android.media.Image; 36 import android.media.Image.Plane; 37 import android.media.MediaCodec; 38 import android.media.MediaCodecInfo; 39 import android.media.MediaFormat; 40 import android.media.MediaMuxer; 41 import android.os.Build; 42 import android.os.Handler; 43 import android.os.HandlerThread; 44 import android.util.Log; 45 import android.util.Pair; 46 import android.util.Size; 47 48 import androidx.annotation.ChecksSdkIntAtLeast; 49 50 import com.android.ex.camera2.blocking.BlockingCameraManager; 51 import com.android.ex.camera2.blocking.BlockingStateCallback; 52 53 import org.json.JSONArray; 54 import org.json.JSONObject; 55 56 import java.io.ByteArrayOutputStream; 57 import java.nio.ByteBuffer; 58 import java.nio.charset.Charset; 59 import java.util.ArrayList; 60 import java.util.Arrays; 61 import java.util.Comparator; 62 import java.util.List; 63 import java.util.Set; 64 import java.util.concurrent.Semaphore; 65 66 public class ItsUtils { 67 public static final String TAG = ItsUtils.class.getSimpleName(); 68 // The tokenizer must be the same as CAMERA_ID_TOKENIZER in device.py 69 public static final String CAMERA_ID_TOKENIZER = "."; 70 jsonToByteBuffer(JSONObject jsonObj)71 public static ByteBuffer jsonToByteBuffer(JSONObject jsonObj) { 72 return ByteBuffer.wrap(jsonObj.toString().getBytes(Charset.defaultCharset())); 73 } 74 getJsonWeightedRectsFromArray( JSONArray a, boolean normalized, int width, int height)75 public static MeteringRectangle[] getJsonWeightedRectsFromArray( 76 JSONArray a, boolean normalized, int width, int height) 77 throws ItsException { 78 try { 79 // Returns [x0,y0,x1,y1,wgt, x0,y0,x1,y1,wgt, x0,y0,x1,y1,wgt, ...] 80 assert(a.length() % 5 == 0); 81 MeteringRectangle[] ma = new MeteringRectangle[a.length() / 5]; 82 for (int i = 0; i < a.length(); i += 5) { 83 int x,y,w,h; 84 if (normalized) { 85 x = (int)Math.floor(a.getDouble(i+0) * width + 0.5f); 86 y = (int)Math.floor(a.getDouble(i+1) * height + 0.5f); 87 w = (int)Math.floor(a.getDouble(i+2) * width + 0.5f); 88 h = (int)Math.floor(a.getDouble(i+3) * height + 0.5f); 89 } else { 90 x = a.getInt(i+0); 91 y = a.getInt(i+1); 92 w = a.getInt(i+2); 93 h = a.getInt(i+3); 94 } 95 x = Math.max(x, 0); 96 y = Math.max(y, 0); 97 w = Math.min(w, width-x); 98 h = Math.min(h, height-y); 99 int wgt = a.getInt(i+4); 100 ma[i/5] = new MeteringRectangle(x,y,w,h,wgt); 101 } 102 return ma; 103 } catch (org.json.JSONException e) { 104 throw new ItsException("JSON error: ", e); 105 } 106 } 107 getOutputSpecs(JSONObject jsonObjTop)108 public static JSONArray getOutputSpecs(JSONObject jsonObjTop) 109 throws ItsException { 110 try { 111 if (jsonObjTop.has("outputSurfaces")) { 112 return jsonObjTop.getJSONArray("outputSurfaces"); 113 } 114 return null; 115 } catch (org.json.JSONException e) { 116 throw new ItsException("JSON error: ", e); 117 } 118 } 119 getRaw16OutputSizes(CameraCharacteristics ccs)120 public static Size[] getRaw16OutputSizes(CameraCharacteristics ccs) 121 throws ItsException { 122 return getOutputSizes(ccs, ImageFormat.RAW_SENSOR, false); 123 } 124 getRaw16MaxResulolutionOutputSizes(CameraCharacteristics ccs)125 public static Size[] getRaw16MaxResulolutionOutputSizes(CameraCharacteristics ccs) 126 throws ItsException { 127 return getOutputSizes(ccs, ImageFormat.RAW_SENSOR, true); 128 } 129 getRaw10OutputSizes(CameraCharacteristics ccs)130 public static Size[] getRaw10OutputSizes(CameraCharacteristics ccs) 131 throws ItsException { 132 return getOutputSizes(ccs, ImageFormat.RAW10, false); 133 } 134 getRaw10MaxResulolutionOutputSizes(CameraCharacteristics ccs)135 public static Size[] getRaw10MaxResulolutionOutputSizes(CameraCharacteristics ccs) 136 throws ItsException { 137 return getOutputSizes(ccs, ImageFormat.RAW10, true); 138 } 139 getRaw12OutputSizes(CameraCharacteristics ccs)140 public static Size[] getRaw12OutputSizes(CameraCharacteristics ccs) 141 throws ItsException { 142 return getOutputSizes(ccs, ImageFormat.RAW12, false); 143 } 144 getJpegOutputSizes(CameraCharacteristics ccs)145 public static Size[] getJpegOutputSizes(CameraCharacteristics ccs) 146 throws ItsException { 147 return getOutputSizes(ccs, ImageFormat.JPEG, false); 148 } 149 getPrivOutputSizes(CameraCharacteristics ccs)150 public static Size[] getPrivOutputSizes(CameraCharacteristics ccs) 151 throws ItsException { 152 return getOutputSizes(ccs, ImageFormat.PRIVATE, false); 153 } 154 getHeicUltraHdrOutputSizes(CameraCharacteristics ccs)155 public static Size[] getHeicUltraHdrOutputSizes(CameraCharacteristics ccs) 156 throws ItsException { 157 return getOutputSizes(ccs, ImageFormat.HEIC_ULTRAHDR, false); 158 } 159 getYuvOutputSizes(CameraCharacteristics ccs)160 public static Size[] getYuvOutputSizes(CameraCharacteristics ccs) 161 throws ItsException { 162 return getOutputSizes(ccs, ImageFormat.YUV_420_888, false); 163 } 164 getY8OutputSizes(CameraCharacteristics ccs)165 public static Size[] getY8OutputSizes(CameraCharacteristics ccs) 166 throws ItsException { 167 return getOutputSizes(ccs, ImageFormat.Y8, false); 168 } 169 getMaxOutputSize(CameraCharacteristics ccs, int format)170 public static Size getMaxOutputSize(CameraCharacteristics ccs, int format) 171 throws ItsException { 172 return getMaxSize(getOutputSizes(ccs, format, false)); 173 } 174 getActiveArrayCropRegion(CameraCharacteristics ccs, boolean isMaximumResolution)175 public static Rect getActiveArrayCropRegion(CameraCharacteristics ccs, 176 boolean isMaximumResolution) { 177 Rect cropRegion = null; 178 if (isMaximumResolution) { 179 cropRegion = ccs.get( 180 CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION); 181 } else { 182 cropRegion = ccs.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); 183 } 184 return cropRegion; 185 } 186 getOutputSizes(CameraCharacteristics ccs, int format, boolean isMaximumResolution)187 private static Size[] getOutputSizes(CameraCharacteristics ccs, int format, 188 boolean isMaximumResolution) throws ItsException { 189 StreamConfigurationMap configMap = null; 190 if (isMaximumResolution) { 191 configMap = ccs.get( 192 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION); 193 } else { 194 configMap = ccs.get( 195 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); 196 } 197 198 if (configMap == null) { 199 throw new ItsException("Failed to get stream config"); 200 } 201 Size[] normalSizes = configMap.getOutputSizes(format); 202 Size[] slowSizes = configMap.getHighResolutionOutputSizes(format); 203 Size[] allSizes = null; 204 if (normalSizes != null && slowSizes != null) { 205 allSizes = new Size[normalSizes.length + slowSizes.length]; 206 System.arraycopy(normalSizes, 0, allSizes, 0, normalSizes.length); 207 System.arraycopy(slowSizes, 0, allSizes, normalSizes.length, slowSizes.length); 208 } else if (normalSizes != null) { 209 allSizes = normalSizes; 210 } else if (slowSizes != null) { 211 allSizes = slowSizes; 212 } 213 return allSizes; 214 } 215 isFixedFocusLens(CameraCharacteristics c)216 public static boolean isFixedFocusLens(CameraCharacteristics c) { 217 Float minFocusDistance = c.get( 218 CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE); 219 return (minFocusDistance != null) && (minFocusDistance == 0.0); 220 } 221 getMaxSize(Size[] sizes)222 public static Size getMaxSize(Size[] sizes) { 223 if (sizes == null || sizes.length == 0) { 224 throw new IllegalArgumentException("sizes was empty"); 225 } 226 227 Size maxSize = sizes[0]; 228 int maxArea = maxSize.getWidth() * maxSize.getHeight(); 229 for (int i = 1; i < sizes.length; i++) { 230 int area = sizes[i].getWidth() * sizes[i].getHeight(); 231 if (area > maxArea || 232 (area == maxArea && sizes[i].getWidth() > maxSize.getWidth())) { 233 maxSize = sizes[i]; 234 maxArea = area; 235 } 236 } 237 238 return maxSize; 239 } 240 getDataFromImage(Image image, Semaphore quota)241 public static byte[] getDataFromImage(Image image, Semaphore quota) 242 throws ItsException { 243 int format = image.getFormat(); 244 int width = image.getWidth(); 245 int height = image.getHeight(); 246 byte[] data = null; 247 248 // Read image data 249 Plane[] planes = image.getPlanes(); 250 251 // Check image validity 252 if (!checkAndroidImageFormat(image)) { 253 throw new ItsException( 254 "Invalid image format passed to getDataFromImage: " + image.getFormat()); 255 } 256 257 if ((format == ImageFormat.JPEG) || (format == ImageFormat.JPEG_R)) { 258 // JPEG doesn't have pixelstride and rowstride, treat it as 1D buffer. 259 ByteBuffer buffer = planes[0].getBuffer(); 260 if (quota != null) { 261 try { 262 Logt.i(TAG, "Start waiting for quota Semaphore"); 263 quota.acquire(buffer.capacity()); 264 Logt.i(TAG, "Acquired quota Semaphore. Start reading image"); 265 } catch (java.lang.InterruptedException e) { 266 Logt.e(TAG, "getDataFromImage error acquiring memory quota. Interrupted", e); 267 } 268 } 269 data = new byte[buffer.capacity()]; 270 buffer.get(data); 271 Logt.i(TAG, "Done reading jpeg image"); 272 return data; 273 } else if (format == ImageFormat.HEIC_ULTRAHDR) { 274 // HEIC doesn't have pixelstride and rowstride, treat it as 1D buffer. 275 ByteBuffer buffer = planes[0].getBuffer(); 276 // The ITS host scripts are not typically able to support HEIC/HEIF images. 277 // We are also only interested in checking the actual captured pixels and not 278 // the format itself so transcode to the more common JPEG. 279 byte[] heicData = new byte[buffer.capacity()]; 280 buffer.get(heicData); 281 Bitmap bmp = BitmapFactory.decodeByteArray(heicData, 0, heicData.length); 282 if (bmp == null) { 283 throw new ItsException("Invalid HEIC image passed to getDataFromImage"); 284 } 285 ByteArrayOutputStream byteStream = new ByteArrayOutputStream(); 286 if (!bmp.compress(Bitmap.CompressFormat.JPEG, 100 /*quality*/, byteStream)) { 287 throw new ItsException("Failed transcoding HEIC"); 288 } 289 if (quota != null) { 290 try { 291 Logt.i(TAG, "Start waiting for quota Semaphore"); 292 quota.acquire(byteStream.size()); 293 Logt.i(TAG, "Acquired quota Semaphore. Start reading image"); 294 } catch (java.lang.InterruptedException e) { 295 Logt.e(TAG, "getDataFromImage error acquiring memory quota. Interrupted", e); 296 } 297 } 298 data = byteStream.toByteArray(); 299 Logt.i(TAG, "Done reading heic image"); 300 return data; 301 } else if (format == ImageFormat.YUV_420_888 || format == ImageFormat.RAW_SENSOR 302 || format == ImageFormat.RAW10 || format == ImageFormat.RAW12 303 || format == ImageFormat.Y8) { 304 int offset = 0; 305 int dataSize = width * height * ImageFormat.getBitsPerPixel(format) / 8; 306 if (quota != null) { 307 try { 308 Logt.i(TAG, "Start waiting for quota Semaphore"); 309 quota.acquire(dataSize); 310 Logt.i(TAG, "Acquired quota Semaphore. Start reading image"); 311 } catch (java.lang.InterruptedException e) { 312 Logt.e(TAG, "getDataFromImage error acquiring memory quota. Interrupted", e); 313 } 314 } 315 data = new byte[dataSize]; 316 int maxRowSize = planes[0].getRowStride(); 317 for (int i = 0; i < planes.length; i++) { 318 if (maxRowSize < planes[i].getRowStride()) { 319 maxRowSize = planes[i].getRowStride(); 320 } 321 } 322 byte[] rowData = new byte[maxRowSize]; 323 for (int i = 0; i < planes.length; i++) { 324 ByteBuffer buffer = planes[i].getBuffer(); 325 int rowStride = planes[i].getRowStride(); 326 int pixelStride = planes[i].getPixelStride(); 327 int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8; 328 Logt.i(TAG, String.format( 329 "Reading image: fmt %d, plane %d, w %d, h %d," + 330 "rowStride %d, pixStride %d, bytesPerPixel %d", 331 format, i, width, height, rowStride, pixelStride, bytesPerPixel)); 332 // For multi-planar yuv images, assuming yuv420 with 2x2 chroma subsampling. 333 int w = (i == 0) ? width : width / 2; 334 int h = (i == 0) ? height : height / 2; 335 for (int row = 0; row < h; row++) { 336 if (pixelStride == bytesPerPixel) { 337 // Special case: optimized read of the entire row 338 int length = w * bytesPerPixel; 339 buffer.get(data, offset, length); 340 // Advance buffer the remainder of the row stride 341 if (row < h - 1) { 342 buffer.position(buffer.position() + rowStride - length); 343 } 344 offset += length; 345 } else { 346 // Generic case: should work for any pixelStride but slower. 347 // Use intermediate buffer to avoid read byte-by-byte from 348 // DirectByteBuffer, which is very bad for performance. 349 // Also need avoid access out of bound by only reading the available 350 // bytes in the bytebuffer. 351 int readSize = rowStride; 352 if (buffer.remaining() < readSize) { 353 readSize = buffer.remaining(); 354 } 355 buffer.get(rowData, 0, readSize); 356 if (pixelStride >= 1) { 357 for (int col = 0; col < w; col++) { 358 data[offset++] = rowData[col * pixelStride]; 359 } 360 } else { 361 // PixelStride of 0 can mean pixel isn't a multiple of 8 bits, for 362 // example with RAW10. Just copy the buffer, dropping any padding at 363 // the end of the row. 364 int length = (w * ImageFormat.getBitsPerPixel(format)) / 8; 365 System.arraycopy(rowData,0,data,offset,length); 366 offset += length; 367 } 368 } 369 } 370 } 371 Logt.i(TAG, String.format("Done reading image, format %d", format)); 372 return data; 373 } else { 374 throw new ItsException("Unsupported image format: " + format); 375 } 376 } 377 checkAndroidImageFormat(Image image)378 private static boolean checkAndroidImageFormat(Image image) { 379 int format = image.getFormat(); 380 Plane[] planes = image.getPlanes(); 381 switch (format) { 382 case ImageFormat.YUV_420_888: 383 case ImageFormat.NV21: 384 case ImageFormat.YV12: 385 return 3 == planes.length; 386 case ImageFormat.RAW_SENSOR: 387 case ImageFormat.RAW10: 388 case ImageFormat.RAW12: 389 case ImageFormat.JPEG: 390 case ImageFormat.JPEG_R: 391 case ImageFormat.HEIC_ULTRAHDR: 392 case ImageFormat.Y8: 393 return 1 == planes.length; 394 default: 395 return false; 396 } 397 } 398 399 public static class ItsCameraIdList { 400 // Short form camera Ids (including both CameraIdList and hidden physical cameras 401 public List<String> mCameraIds; 402 // Camera Id combos (ids from CameraIdList, and hidden physical camera Ids 403 // in the form of [logical camera id]:[hidden physical camera id] 404 public List<String> mCameraIdCombos; 405 // Primary rear and front camera Ids (as defined in MPC) 406 public String mPrimaryRearCameraId; 407 public String mPrimaryFrontCameraId; 408 } 409 getItsCompatibleCameraIds(CameraManager manager)410 public static ItsCameraIdList getItsCompatibleCameraIds(CameraManager manager) 411 throws ItsException { 412 if (manager == null) { 413 throw new IllegalArgumentException("CameraManager is null"); 414 } 415 416 ItsCameraIdList outList = new ItsCameraIdList(); 417 outList.mCameraIds = new ArrayList<String>(); 418 outList.mCameraIdCombos = new ArrayList<String>(); 419 try { 420 String[] cameraIds = manager.getCameraIdList(); 421 for (String id : cameraIds) { 422 CameraCharacteristics characteristics = manager.getCameraCharacteristics(id); 423 int[] actualCapabilities = characteristics.get( 424 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 425 boolean haveBC = false; 426 boolean isMultiCamera = false; 427 final int BACKWARD_COMPAT = 428 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE; 429 final int LOGICAL_MULTI_CAMERA = 430 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA; 431 432 final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING); 433 if (facing != null) { 434 if (facing == CameraMetadata.LENS_FACING_BACK 435 && outList.mPrimaryRearCameraId == null) { 436 outList.mPrimaryRearCameraId = id; 437 } else if (facing == CameraMetadata.LENS_FACING_FRONT 438 && outList.mPrimaryFrontCameraId == null) { 439 outList.mPrimaryFrontCameraId = id; 440 } 441 } 442 443 for (int capability : actualCapabilities) { 444 if (capability == BACKWARD_COMPAT) { 445 haveBC = true; 446 } 447 if (capability == LOGICAL_MULTI_CAMERA) { 448 isMultiCamera = true; 449 } 450 } 451 452 // Skip devices that does not support BACKWARD_COMPATIBLE capability 453 if (!haveBC) continue; 454 455 int hwLevel = characteristics.get( 456 CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); 457 if (hwLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY || 458 hwLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL) { 459 // Skip LEGACY and EXTERNAL devices 460 continue; 461 } 462 outList.mCameraIds.add(id); 463 outList.mCameraIdCombos.add(id); 464 465 // Only add hidden physical cameras for multi-camera. 466 if (!isMultiCamera) continue; 467 468 float defaultFocalLength = getLogicalCameraDefaultFocalLength(manager, id); 469 Set<String> physicalIds = characteristics.getPhysicalCameraIds(); 470 for (String physicalId : physicalIds) { 471 if (Arrays.asList(cameraIds).contains(physicalId)) continue; 472 473 CameraCharacteristics physicalChar = 474 manager.getCameraCharacteristics(physicalId); 475 hwLevel = physicalChar.get( 476 CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); 477 if (hwLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY || 478 hwLevel == 479 CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL) { 480 // Skip LEGACY and EXTERNAL devices 481 continue; 482 } 483 484 int[] physicalActualCapabilities = physicalChar.get( 485 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 486 boolean physicalHaveBC = false; 487 for (int capability : physicalActualCapabilities) { 488 if (capability == BACKWARD_COMPAT) { 489 physicalHaveBC = true; 490 break; 491 } 492 } 493 if (!physicalHaveBC) { 494 continue; 495 } 496 // To reduce duplicate tests, only additionally test hidden physical cameras 497 // with different focal length compared to the default focal length of the 498 // logical camera. 499 float[] physicalFocalLengths = physicalChar.get( 500 CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS); 501 if (defaultFocalLength != physicalFocalLengths[0]) { 502 outList.mCameraIds.add(physicalId); 503 outList.mCameraIdCombos.add(id + CAMERA_ID_TOKENIZER + physicalId); 504 } 505 } 506 507 } 508 } catch (CameraAccessException e) { 509 Logt.e(TAG, 510 "Received error from camera service while checking device capabilities: " + e); 511 throw new ItsException("Failed to get device ID list", e); 512 } 513 return outList; 514 } 515 getLogicalCameraDefaultFocalLength(CameraManager manager, String cameraId)516 public static float getLogicalCameraDefaultFocalLength(CameraManager manager, 517 String cameraId) throws ItsException { 518 BlockingCameraManager blockingManager = new BlockingCameraManager(manager); 519 BlockingStateCallback listener = new BlockingStateCallback(); 520 HandlerThread cameraThread = new HandlerThread("ItsUtilThread"); 521 cameraThread.start(); 522 Handler cameraHandler = new Handler(cameraThread.getLooper()); 523 CameraDevice camera = null; 524 float defaultFocalLength = 0.0f; 525 526 try { 527 camera = blockingManager.openCamera(cameraId, listener, cameraHandler); 528 CaptureRequest.Builder previewBuilder = 529 camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 530 defaultFocalLength = previewBuilder.get(CaptureRequest.LENS_FOCAL_LENGTH); 531 } catch (Exception e) { 532 throw new ItsException("Failed to query default focal length for logical camera", e); 533 } finally { 534 if (camera != null) { 535 camera.close(); 536 } 537 if (cameraThread != null) { 538 cameraThread.quitSafely(); 539 } 540 } 541 return defaultFocalLength; 542 } 543 544 public static class MediaCodecListener extends MediaCodec.Callback { 545 private final MediaMuxer mMediaMuxer; 546 private final Object mCondition; 547 private int mTrackId = -1; 548 private boolean mEndOfStream = false; 549 MediaCodecListener(MediaMuxer mediaMuxer, Object condition)550 public MediaCodecListener(MediaMuxer mediaMuxer, Object condition) { 551 mMediaMuxer = mediaMuxer; 552 mCondition = condition; 553 } 554 555 @Override onInputBufferAvailable(MediaCodec codec, int index)556 public void onInputBufferAvailable(MediaCodec codec, int index) { 557 Log.e(TAG, "Unexpected input buffer available callback!"); 558 } 559 560 @Override onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info)561 public void onOutputBufferAvailable(MediaCodec codec, int index, 562 MediaCodec.BufferInfo info) { 563 synchronized (mCondition) { 564 if (mTrackId < 0) { 565 return; 566 } 567 568 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 569 mEndOfStream = true; 570 mCondition.notifyAll(); 571 } 572 573 if (!mEndOfStream) { 574 mMediaMuxer.writeSampleData(mTrackId, codec.getOutputBuffer(index), info); 575 codec.releaseOutputBuffer(index, false); 576 } 577 } 578 } 579 580 @Override onError(MediaCodec codec, MediaCodec.CodecException e)581 public void onError(MediaCodec codec, MediaCodec.CodecException e) { 582 Log.e(TAG, "Codec error: " + e.getDiagnosticInfo()); 583 } 584 585 @Override onOutputFormatChanged(MediaCodec codec, MediaFormat format)586 public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) { 587 synchronized (mCondition) { 588 mTrackId = mMediaMuxer.addTrack(format); 589 mMediaMuxer.start(); 590 } 591 } 592 } 593 594 public static final long SESSION_CLOSE_TIMEOUT_MS = 3000; 595 596 // used to find a good-enough recording bitrate for a given resolution. "Good enough" for the 597 // ITS test to run its calculations and still be supported by the HAL. 598 // NOTE: Keep sorted for convenience 599 public static final List<Pair<Integer, Integer>> RESOLUTION_TO_CAMCORDER_PROFILE = List.of( 600 Pair.create(176 * 144, CamcorderProfile.QUALITY_QCIF), 601 Pair.create(320 * 240, CamcorderProfile.QUALITY_QVGA), 602 Pair.create(352 * 288, CamcorderProfile.QUALITY_CIF), 603 Pair.create(640 * 480, CamcorderProfile.QUALITY_VGA), 604 Pair.create(720 * 480, CamcorderProfile.QUALITY_480P), 605 Pair.create(1280 * 720, CamcorderProfile.QUALITY_720P), 606 Pair.create(1920 * 1080, CamcorderProfile.QUALITY_1080P), 607 Pair.create(2048 * 1080, CamcorderProfile.QUALITY_2K), 608 Pair.create(2560 * 1440, CamcorderProfile.QUALITY_QHD), 609 Pair.create(3840 * 2160, CamcorderProfile.QUALITY_2160P), 610 Pair.create(4096 * 2160, CamcorderProfile.QUALITY_4KDCI) 611 // should be safe to assume that we don't have previews over 4k 612 ); 613 614 /** 615 * Initialize a HLG10 MediaFormat instance with size, bitrate, and videoFrameRate. 616 */ initializeHLG10Format(Size videoSize, int videoBitRate, int videoFrameRate)617 public static MediaFormat initializeHLG10Format(Size videoSize, int videoBitRate, 618 int videoFrameRate) { 619 MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_HEVC, 620 videoSize.getWidth(), videoSize.getHeight()); 621 format.setInteger(MediaFormat.KEY_PROFILE, HEVCProfileMain10); 622 format.setInteger(MediaFormat.KEY_BIT_RATE, videoBitRate); 623 format.setInteger(MediaFormat.KEY_FRAME_RATE, videoFrameRate); 624 format.setInteger(MediaFormat.KEY_COLOR_FORMAT, 625 MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); 626 format.setInteger(MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT2020); 627 format.setInteger(MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_FULL); 628 format.setInteger(MediaFormat.KEY_COLOR_TRANSFER, MediaFormat.COLOR_TRANSFER_HLG); 629 format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); 630 return format; 631 } 632 633 // Default bitrate to use for recordings when querying CamcorderProfile fails. 634 private static final int DEFAULT_RECORDING_BITRATE = 25_000_000; // 25 Mbps 635 636 /** 637 * Looks up a reasonable recording bitrate from {@link CamcorderProfile} for the given 638 * {@code previewSize} and {@code maxFps}. This is not the most optimal bitrate, but should be 639 * good enough for ITS tests to run their analyses. 640 */ calculateBitrate(int cameraId, Size previewSize, int maxFps)641 public static int calculateBitrate(int cameraId, Size previewSize, int maxFps) 642 throws ItsException { 643 int previewResolution = previewSize.getHeight() * previewSize.getWidth(); 644 645 List<Pair<Integer, Integer>> resToProfile = 646 new ArrayList<>(RESOLUTION_TO_CAMCORDER_PROFILE); 647 // ensure that the list is sorted in ascending order of resolution 648 resToProfile.sort(Comparator.comparingInt(a -> a.first)); 649 650 // Choose the first available resolution that is >= the requested preview size. 651 for (Pair<Integer, Integer> entry : resToProfile) { 652 if (previewResolution > entry.first) continue; 653 if (!CamcorderProfile.hasProfile(cameraId, entry.second)) continue; 654 655 EncoderProfiles profiles = CamcorderProfile.getAll( 656 String.valueOf(cameraId), entry.second); 657 if (profiles == null) continue; 658 659 List<EncoderProfiles.VideoProfile> videoProfiles = profiles.getVideoProfiles(); 660 661 // Find a profile which can achieve the requested max frame rate 662 for (EncoderProfiles.VideoProfile profile : videoProfiles) { 663 if (profile == null) continue; 664 if (profile.getFrameRate() >= maxFps) { 665 Logt.i(TAG, "Recording bitrate: " + profile.getBitrate() 666 + ", fps " + profile.getFrameRate()); 667 return profile.getBitrate(); 668 } 669 } 670 } 671 672 // TODO(b/223439995): There is a bug where some devices might populate result of 673 // CamcorderProfile.getAll with nulls even when a given quality is 674 // supported. Until this bug is fixed, fall back to the "deprecated" 675 // CamcorderProfile.get call to get the video bitrate. This logic can be 676 // removed once the bug is fixed. 677 Logt.i(TAG, "No matching EncoderProfile found. Falling back to CamcorderProfiles"); 678 // Mimic logic from above, but use CamcorderProfiles instead 679 for (Pair<Integer, Integer> entry : resToProfile) { 680 if (previewResolution > entry.first) continue; 681 if (!CamcorderProfile.hasProfile(cameraId, entry.second)) continue; 682 683 CamcorderProfile profile = CamcorderProfile.get(cameraId, entry.second); 684 if (profile == null) continue; 685 686 int profileFrameRate = profile.videoFrameRate; 687 float bitRateScale = (profileFrameRate < maxFps) 688 ? 1.0f * maxFps / profileFrameRate : 1.0f; 689 Logt.i(TAG, "Recording bitrate: " + profile.videoBitRate + " * " + bitRateScale); 690 return (int) (profile.videoBitRate * bitRateScale); 691 } 692 693 // Ideally, we should always find a Camcorder/Encoder Profile corresponding 694 // to the preview size. 695 Logt.w(TAG, "Could not find bitrate for any resolution >= " + previewSize 696 + " for cameraId " + cameraId + ". Using default bitrate"); 697 return DEFAULT_RECORDING_BITRATE; 698 } 699 700 /** 701 * Check if the device is running on at least Android V. 702 */ 703 @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.VANILLA_ICE_CREAM) isAtLeastV()704 public static boolean isAtLeastV() { 705 return Build.VERSION.SDK_INT >= Build.VERSION_CODES.VANILLA_ICE_CREAM; 706 } 707 } 708