1 /* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package android.hardware.camera2.impl; 18 19 import android.annotation.NonNull; 20 import android.compat.annotation.UnsupportedAppUsage; 21 import android.graphics.ImageFormat; 22 import android.graphics.Point; 23 import android.graphics.Rect; 24 import android.hardware.camera2.CameraCharacteristics; 25 import android.hardware.camera2.CameraMetadata; 26 import android.hardware.camera2.CaptureRequest; 27 import android.hardware.camera2.CaptureResult; 28 import android.hardware.camera2.marshal.MarshalQueryable; 29 import android.hardware.camera2.marshal.MarshalRegistry; 30 import android.hardware.camera2.marshal.Marshaler; 31 import android.hardware.camera2.marshal.impl.MarshalQueryableArray; 32 import android.hardware.camera2.marshal.impl.MarshalQueryableBlackLevelPattern; 33 import android.hardware.camera2.marshal.impl.MarshalQueryableBoolean; 34 import android.hardware.camera2.marshal.impl.MarshalQueryableColorSpaceTransform; 35 import android.hardware.camera2.marshal.impl.MarshalQueryableEnum; 36 import android.hardware.camera2.marshal.impl.MarshalQueryableHighSpeedVideoConfiguration; 37 import android.hardware.camera2.marshal.impl.MarshalQueryableMeteringRectangle; 38 import android.hardware.camera2.marshal.impl.MarshalQueryableNativeByteToInteger; 39 import android.hardware.camera2.marshal.impl.MarshalQueryablePair; 40 import android.hardware.camera2.marshal.impl.MarshalQueryableParcelable; 41 import android.hardware.camera2.marshal.impl.MarshalQueryablePrimitive; 42 import android.hardware.camera2.marshal.impl.MarshalQueryableRange; 43 import android.hardware.camera2.marshal.impl.MarshalQueryableRecommendedStreamConfiguration; 44 import android.hardware.camera2.marshal.impl.MarshalQueryableRect; 45 import android.hardware.camera2.marshal.impl.MarshalQueryableReprocessFormatsMap; 46 import android.hardware.camera2.marshal.impl.MarshalQueryableRggbChannelVector; 47 import android.hardware.camera2.marshal.impl.MarshalQueryableSize; 48 import android.hardware.camera2.marshal.impl.MarshalQueryableSizeF; 49 import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfiguration; 50 import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfigurationDuration; 51 import android.hardware.camera2.marshal.impl.MarshalQueryableString; 52 import android.hardware.camera2.params.Capability; 53 import android.hardware.camera2.params.DeviceStateSensorOrientationMap; 54 import android.hardware.camera2.params.DynamicRangeProfiles; 55 import android.hardware.camera2.params.Face; 56 import android.hardware.camera2.params.HighSpeedVideoConfiguration; 57 import android.hardware.camera2.params.LensShadingMap; 58 import android.hardware.camera2.params.MandatoryStreamCombination; 59 import android.hardware.camera2.params.MultiResolutionStreamConfigurationMap; 60 import android.hardware.camera2.params.OisSample; 61 import 
android.hardware.camera2.params.RecommendedStreamConfiguration; 62 import android.hardware.camera2.params.RecommendedStreamConfigurationMap; 63 import android.hardware.camera2.params.ReprocessFormatsMap; 64 import android.hardware.camera2.params.StreamConfiguration; 65 import android.hardware.camera2.params.StreamConfigurationDuration; 66 import android.hardware.camera2.params.StreamConfigurationMap; 67 import android.hardware.camera2.params.TonemapCurve; 68 import android.hardware.camera2.utils.ArrayUtils; 69 import android.hardware.camera2.utils.TypeReference; 70 import android.location.Location; 71 import android.location.LocationManager; 72 import android.os.Build; 73 import android.os.Parcel; 74 import android.os.Parcelable; 75 import android.os.ServiceSpecificException; 76 import android.util.Log; 77 import android.util.Range; 78 import android.util.Size; 79 80 import dalvik.annotation.optimization.FastNative; 81 import dalvik.system.VMRuntime; 82 83 import java.io.IOException; 84 import java.nio.ByteBuffer; 85 import java.nio.ByteOrder; 86 import java.util.ArrayList; 87 import java.util.Arrays; 88 import java.util.Collections; 89 import java.util.HashMap; 90 import java.util.HashSet; 91 import java.util.Map; 92 import java.util.List; 93 import java.util.Objects; 94 import java.util.Set; 95 96 /** 97 * Implementation of camera metadata marshal/unmarshal across Binder to 98 * the camera service 99 */ 100 public class CameraMetadataNative implements Parcelable { 101 102 public static class Key<T> { 103 private boolean mHasTag; 104 private int mTag; 105 private long mVendorId = Long.MAX_VALUE; 106 private final Class<T> mType; 107 private final TypeReference<T> mTypeReference; 108 private final String mName; 109 private final String mFallbackName; 110 private final int mHash; 111 112 /** 113 * @hide 114 */ Key(String name, Class<T> type, long vendorId)115 public Key(String name, Class<T> type, long vendorId) { 116 if (name == null) { 117 throw new NullPointerException("Key needs a valid name"); 118 } else if (type == null) { 119 throw new NullPointerException("Type needs to be non-null"); 120 } 121 mName = name; 122 mFallbackName = null; 123 mType = type; 124 mVendorId = vendorId; 125 mTypeReference = TypeReference.createSpecializedTypeReference(type); 126 mHash = mName.hashCode() ^ mTypeReference.hashCode(); 127 } 128 129 /** 130 * @hide 131 */ Key(String name, String fallbackName, Class<T> type)132 public Key(String name, String fallbackName, Class<T> type) { 133 if (name == null) { 134 throw new NullPointerException("Key needs a valid name"); 135 } else if (type == null) { 136 throw new NullPointerException("Type needs to be non-null"); 137 } 138 mName = name; 139 mFallbackName = fallbackName; 140 mType = type; 141 mTypeReference = TypeReference.createSpecializedTypeReference(type); 142 mHash = mName.hashCode() ^ mTypeReference.hashCode(); 143 } 144 145 /** 146 * Visible for testing only. 
147 * 148 * <p>Use the CameraCharacteristics.Key, CaptureResult.Key, or CaptureRequest.Key 149 * for application code or vendor-extended keys.</p> 150 */ Key(String name, Class<T> type)151 public Key(String name, Class<T> type) { 152 if (name == null) { 153 throw new NullPointerException("Key needs a valid name"); 154 } else if (type == null) { 155 throw new NullPointerException("Type needs to be non-null"); 156 } 157 mName = name; 158 mFallbackName = null; 159 mType = type; 160 mTypeReference = TypeReference.createSpecializedTypeReference(type); 161 mHash = mName.hashCode() ^ mTypeReference.hashCode(); 162 } 163 164 /** 165 * Visible for testing only. 166 * 167 * <p>Use the CameraCharacteristics.Key, CaptureResult.Key, or CaptureRequest.Key 168 * for application code or vendor-extended keys.</p> 169 */ 170 @SuppressWarnings("unchecked") Key(String name, TypeReference<T> typeReference)171 public Key(String name, TypeReference<T> typeReference) { 172 if (name == null) { 173 throw new NullPointerException("Key needs a valid name"); 174 } else if (typeReference == null) { 175 throw new NullPointerException("TypeReference needs to be non-null"); 176 } 177 mName = name; 178 mFallbackName = null; 179 mType = (Class<T>)typeReference.getRawType(); 180 mTypeReference = typeReference; 181 mHash = mName.hashCode() ^ mTypeReference.hashCode(); 182 } 183 184 /** 185 * Return a camelCase, period separated name formatted like: 186 * {@code "root.section[.subsections].name"}. 187 * 188 * <p>Built-in keys exposed by the Android SDK are always prefixed with {@code "android."}; 189 * keys that are device/platform-specific are prefixed with {@code "com."}.</p> 190 * 191 * <p>For example, {@code CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP} would 192 * have a name of {@code "android.scaler.streamConfigurationMap"}; whereas a device 193 * specific key might look like {@code "com.google.nexus.data.private"}.</p> 194 * 195 * @return String representation of the key name 196 */ getName()197 public final String getName() { 198 return mName; 199 } 200 201 /** 202 * {@inheritDoc} 203 */ 204 @Override hashCode()205 public final int hashCode() { 206 return mHash; 207 } 208 209 /** 210 * Compare this key against other native keys, request keys, result keys, and 211 * characteristics keys. 212 * 213 * <p>Two keys are considered equal if their name and type reference are equal.</p> 214 * 215 * <p>Note that the equality against non-native keys is one-way. A native key may be equal 216 * to a result key; but that same result key will not be equal to a native key.</p> 217 */ 218 @SuppressWarnings("rawtypes") 219 @Override equals(Object o)220 public final boolean equals(Object o) { 221 if (this == o) { 222 return true; 223 } 224 225 if (o == null || this.hashCode() != o.hashCode()) { 226 return false; 227 } 228 229 Key<?> lhs; 230 231 if (o instanceof CaptureResult.Key) { 232 lhs = ((CaptureResult.Key)o).getNativeKey(); 233 } else if (o instanceof CaptureRequest.Key) { 234 lhs = ((CaptureRequest.Key)o).getNativeKey(); 235 } else if (o instanceof CameraCharacteristics.Key) { 236 lhs = ((CameraCharacteristics.Key)o).getNativeKey(); 237 } else if ((o instanceof Key)) { 238 lhs = (Key<?>)o; 239 } else { 240 return false; 241 } 242 243 return mName.equals(lhs.mName) && mTypeReference.equals(lhs.mTypeReference); 244 } 245 246 /** 247 * <p> 248 * Get the tag corresponding to this key. This enables insertion into the 249 * native metadata. 
250 * </p> 251 * 252 * <p>This value is looked up the first time, and cached subsequently.</p> 253 * 254 * <p>This function may be called without cacheTag() if this is not a vendor key. 255 * If this is a vendor key, cacheTag() must be called first before getTag() can 256 * be called. Otherwise, mVendorId could be default (Long.MAX_VALUE) and vendor 257 * tag lookup could fail.</p> 258 * 259 * @return The tag numeric value corresponding to the string 260 */ 261 @UnsupportedAppUsage getTag()262 public final int getTag() { 263 if (!mHasTag) { 264 mTag = CameraMetadataNative.getTag(mName, mVendorId); 265 mHasTag = true; 266 } 267 return mTag; 268 } 269 270 /** 271 * Whether this key's tag is cached. 272 * 273 * @hide 274 */ 275 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) hasTag()276 public final boolean hasTag() { 277 return mHasTag; 278 } 279 280 /** 281 * Cache this key's tag. 282 * 283 * @hide 284 */ 285 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) cacheTag(int tag)286 public final void cacheTag(int tag) { 287 mHasTag = true; 288 mTag = tag; 289 } 290 291 /** 292 * Get the raw class backing the type {@code T} for this key. 293 * 294 * <p>The distinction is only important if {@code T} is a generic, e.g. 295 * {@code Range<Integer>} since the nested type will be erased.</p> 296 */ getType()297 public final Class<T> getType() { 298 // TODO: remove this; other places should use #getTypeReference() instead 299 return mType; 300 } 301 302 /** 303 * Get the vendor tag provider id. 304 * 305 * @hide 306 */ getVendorId()307 public final long getVendorId() { 308 return mVendorId; 309 } 310 311 /** 312 * Get the type reference backing the type {@code T} for this key. 313 * 314 * <p>The distinction is only important if {@code T} is a generic, e.g. 
315 * {@code Range<Integer>} since the nested type will be retained.</p> 316 */ getTypeReference()317 public final TypeReference<T> getTypeReference() { 318 return mTypeReference; 319 } 320 } 321 322 private static final String TAG = "CameraMetadataJV"; 323 private static final boolean DEBUG = false; 324 325 // this should be in sync with HAL_PIXEL_FORMAT_BLOB defined in graphics.h 326 public static final int NATIVE_JPEG_FORMAT = 0x21; 327 328 private static final String CELLID_PROCESS = "CELLID"; 329 private static final String GPS_PROCESS = "GPS"; 330 private static final int FACE_LANDMARK_SIZE = 6; 331 332 private static final int MANDATORY_STREAM_CONFIGURATIONS_DEFAULT = 0; 333 private static final int MANDATORY_STREAM_CONFIGURATIONS_MAX_RESOLUTION = 1; 334 private static final int MANDATORY_STREAM_CONFIGURATIONS_CONCURRENT = 2; 335 private static final int MANDATORY_STREAM_CONFIGURATIONS_10BIT = 3; 336 private static final int MANDATORY_STREAM_CONFIGURATIONS_USE_CASE = 4; 337 private static final int MANDATORY_STREAM_CONFIGURATIONS_PREVIEW_STABILIZATION = 5; 338 translateLocationProviderToProcess(final String provider)339 private static String translateLocationProviderToProcess(final String provider) { 340 if (provider == null) { 341 return null; 342 } 343 switch(provider) { 344 case LocationManager.GPS_PROVIDER: 345 return GPS_PROCESS; 346 case LocationManager.NETWORK_PROVIDER: 347 return CELLID_PROCESS; 348 default: 349 return null; 350 } 351 } 352 translateProcessToLocationProvider(final String process)353 private static String translateProcessToLocationProvider(final String process) { 354 if (process == null) { 355 return null; 356 } 357 switch(process) { 358 case GPS_PROCESS: 359 return LocationManager.GPS_PROVIDER; 360 case CELLID_PROCESS: 361 return LocationManager.NETWORK_PROVIDER; 362 default: 363 return null; 364 } 365 } 366 CameraMetadataNative()367 public CameraMetadataNative() { 368 super(); 369 mMetadataPtr = nativeAllocate(); 370 if (mMetadataPtr == 0) { 371 throw new OutOfMemoryError("Failed to allocate native CameraMetadata"); 372 } 373 updateNativeAllocation(); 374 } 375 376 /** 377 * Copy constructor - clone metadata 378 */ CameraMetadataNative(CameraMetadataNative other)379 public CameraMetadataNative(CameraMetadataNative other) { 380 super(); 381 mMetadataPtr = nativeAllocateCopy(other.mMetadataPtr); 382 if (mMetadataPtr == 0) { 383 throw new OutOfMemoryError("Failed to allocate native CameraMetadata"); 384 } 385 updateNativeAllocation(); 386 } 387 388 /** 389 * Move the contents from {@code other} into a new camera metadata instance.</p> 390 * 391 * <p>After this call, {@code other} will become empty.</p> 392 * 393 * @param other the previous metadata instance which will get pilfered 394 * @return a new metadata instance with the values from {@code other} moved into it 395 */ move(CameraMetadataNative other)396 public static CameraMetadataNative move(CameraMetadataNative other) { 397 CameraMetadataNative newObject = new CameraMetadataNative(); 398 newObject.swap(other); 399 return newObject; 400 } 401 402 /** 403 * Set all metadata values in the destination argument by using the corresponding 404 * values from the source. Metadata tags present in the destination and absent 405 * from the source will remain unmodified. 
406 * 407 * @param dst Destination metadata 408 * @param src Source metadata 409 * @hide 410 */ update(CameraMetadataNative dst, CameraMetadataNative src)411 public static void update(CameraMetadataNative dst, CameraMetadataNative src) { 412 nativeUpdate(dst.mMetadataPtr, src.mMetadataPtr); 413 } 414 415 public static final @android.annotation.NonNull Parcelable.Creator<CameraMetadataNative> CREATOR = 416 new Parcelable.Creator<CameraMetadataNative>() { 417 @Override 418 public CameraMetadataNative createFromParcel(Parcel in) { 419 CameraMetadataNative metadata = new CameraMetadataNative(); 420 metadata.readFromParcel(in); 421 return metadata; 422 } 423 424 @Override 425 public CameraMetadataNative[] newArray(int size) { 426 return new CameraMetadataNative[size]; 427 } 428 }; 429 430 @Override describeContents()431 public int describeContents() { 432 return 0; 433 } 434 435 @Override writeToParcel(Parcel dest, int flags)436 public void writeToParcel(Parcel dest, int flags) { 437 nativeWriteToParcel(dest, mMetadataPtr); 438 } 439 440 /** 441 * @hide 442 */ get(CameraCharacteristics.Key<T> key)443 public <T> T get(CameraCharacteristics.Key<T> key) { 444 return get(key.getNativeKey()); 445 } 446 447 /** 448 * @hide 449 */ get(CaptureResult.Key<T> key)450 public <T> T get(CaptureResult.Key<T> key) { 451 return get(key.getNativeKey()); 452 } 453 454 /** 455 * @hide 456 */ get(CaptureRequest.Key<T> key)457 public <T> T get(CaptureRequest.Key<T> key) { 458 return get(key.getNativeKey()); 459 } 460 461 /** 462 * Look-up a metadata field value by its key. 463 * 464 * @param key a non-{@code null} key instance 465 * @return the field corresponding to the {@code key}, or {@code null} if no value was set 466 */ get(Key<T> key)467 public <T> T get(Key<T> key) { 468 Objects.requireNonNull(key, "key must not be null"); 469 470 // Check if key has been overridden to use a wrapper class on the java side. 471 GetCommand g = sGetCommandMap.get(key); 472 if (g != null) { 473 return g.getValue(this, key); 474 } 475 return getBase(key); 476 } 477 readFromParcel(Parcel in)478 public void readFromParcel(Parcel in) { 479 nativeReadFromParcel(in, mMetadataPtr); 480 updateNativeAllocation(); 481 } 482 483 /** 484 * Set the global client-side vendor tag descriptor to allow use of vendor 485 * tags in camera applications. 486 * 487 * @throws ServiceSpecificException 488 * @hide 489 */ setupGlobalVendorTagDescriptor()490 public static void setupGlobalVendorTagDescriptor() throws ServiceSpecificException { 491 int err = nativeSetupGlobalVendorTagDescriptor(); 492 if (err != 0) { 493 throw new ServiceSpecificException(err, "Failure to set up global vendor tags"); 494 } 495 } 496 497 /** 498 * Set the global client-side vendor tag descriptor to allow use of vendor 499 * tags in camera applications. 500 * 501 * @return int An error code corresponding to one of the 502 * {@link ICameraService} error constants, or 0 on success. 503 */ nativeSetupGlobalVendorTagDescriptor()504 private static native int nativeSetupGlobalVendorTagDescriptor(); 505 506 /** 507 * Set a camera metadata field to a value. The field definitions can be 508 * found in {@link CameraCharacteristics}, {@link CaptureResult}, and 509 * {@link CaptureRequest}. 510 * 511 * @param key The metadata field to write. 512 * @param value The value to set the field to, which must be of a matching 513 * type to the key. 
514 */ set(Key<T> key, T value)515 public <T> void set(Key<T> key, T value) { 516 SetCommand s = sSetCommandMap.get(key); 517 if (s != null) { 518 s.setValue(this, value); 519 return; 520 } 521 522 setBase(key, value); 523 } 524 set(CaptureRequest.Key<T> key, T value)525 public <T> void set(CaptureRequest.Key<T> key, T value) { 526 set(key.getNativeKey(), value); 527 } 528 set(CaptureResult.Key<T> key, T value)529 public <T> void set(CaptureResult.Key<T> key, T value) { 530 set(key.getNativeKey(), value); 531 } 532 set(CameraCharacteristics.Key<T> key, T value)533 public <T> void set(CameraCharacteristics.Key<T> key, T value) { 534 set(key.getNativeKey(), value); 535 } 536 537 // Keep up-to-date with camera_metadata.h 538 /** 539 * @hide 540 */ 541 public static final int TYPE_BYTE = 0; 542 /** 543 * @hide 544 */ 545 public static final int TYPE_INT32 = 1; 546 /** 547 * @hide 548 */ 549 public static final int TYPE_FLOAT = 2; 550 /** 551 * @hide 552 */ 553 public static final int TYPE_INT64 = 3; 554 /** 555 * @hide 556 */ 557 public static final int TYPE_DOUBLE = 4; 558 /** 559 * @hide 560 */ 561 public static final int TYPE_RATIONAL = 5; 562 /** 563 * @hide 564 */ 565 public static final int NUM_TYPES = 6; 566 close()567 private void close() { 568 // Delete native pointer, but does not clear it 569 nativeClose(mMetadataPtr); 570 mMetadataPtr = 0; 571 572 if (mBufferSize > 0) { 573 VMRuntime.getRuntime().registerNativeFree(mBufferSize); 574 } 575 mBufferSize = 0; 576 } 577 getBase(CameraCharacteristics.Key<T> key)578 private <T> T getBase(CameraCharacteristics.Key<T> key) { 579 return getBase(key.getNativeKey()); 580 } 581 getBase(CaptureResult.Key<T> key)582 private <T> T getBase(CaptureResult.Key<T> key) { 583 return getBase(key.getNativeKey()); 584 } 585 getBase(CaptureRequest.Key<T> key)586 private <T> T getBase(CaptureRequest.Key<T> key) { 587 return getBase(key.getNativeKey()); 588 } 589 getBase(Key<T> key)590 private <T> T getBase(Key<T> key) { 591 int tag; 592 if (key.hasTag()) { 593 tag = key.getTag(); 594 } else { 595 tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.getName()); 596 key.cacheTag(tag); 597 } 598 byte[] values = readValues(tag); 599 if (values == null) { 600 // If the key returns null, use the fallback key if exists. 601 // This is to support old key names for the newly published keys. 602 if (key.mFallbackName == null) { 603 return null; 604 } 605 tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.mFallbackName); 606 values = readValues(tag); 607 if (values == null) { 608 return null; 609 } 610 } 611 612 int nativeType = nativeGetTypeFromTagLocal(mMetadataPtr, tag); 613 Marshaler<T> marshaler = getMarshalerForKey(key, nativeType); 614 ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder()); 615 return marshaler.unmarshal(buffer); 616 } 617 618 // Use Command pattern here to avoid lots of expensive if/equals checks in get for overridden 619 // metadata. 
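    // For example, a call to get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP) is routed
    // through the GetCommand registered below, which assembles the StreamConfigurationMap wrapper
    // from the individual raw stream configuration tags instead of unmarshaling a single native entry.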
620 private static final HashMap<Key<?>, GetCommand> sGetCommandMap = 621 new HashMap<Key<?>, GetCommand>(); 622 static { 623 sGetCommandMap.put( GetCommand()624 CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey(), new GetCommand() { 625 @Override 626 @SuppressWarnings("unchecked") 627 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 628 return (T) metadata.getAvailableFormats(); 629 } 630 }); 631 sGetCommandMap.put( GetCommand()632 CaptureResult.STATISTICS_FACES.getNativeKey(), new GetCommand() { 633 @Override 634 @SuppressWarnings("unchecked") 635 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 636 return (T) metadata.getFaces(); 637 } 638 }); 639 sGetCommandMap.put( GetCommand()640 CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey(), new GetCommand() { 641 @Override 642 @SuppressWarnings("unchecked") 643 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 644 return (T) metadata.getFaceRectangles(); 645 } 646 }); 647 sGetCommandMap.put( CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP.getNativeKey()648 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP.getNativeKey(), 649 new GetCommand() { 650 @Override 651 @SuppressWarnings("unchecked") 652 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 653 return (T) metadata.getStreamConfigurationMap(); 654 } 655 }); 656 sGetCommandMap.put( CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION.getNativeKey()657 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION.getNativeKey(), 658 new GetCommand() { 659 @Override 660 @SuppressWarnings("unchecked") 661 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 662 return (T) metadata.getStreamConfigurationMapMaximumResolution(); 663 } 664 }); 665 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_STREAM_COMBINATIONS.getNativeKey()666 CameraCharacteristics.SCALER_MANDATORY_STREAM_COMBINATIONS.getNativeKey(), 667 new GetCommand() { 668 @Override 669 @SuppressWarnings("unchecked") 670 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 671 return (T) metadata.getMandatoryStreamCombinations(); 672 } 673 }); 674 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_CONCURRENT_STREAM_COMBINATIONS.getNativeKey()675 CameraCharacteristics.SCALER_MANDATORY_CONCURRENT_STREAM_COMBINATIONS.getNativeKey(), 676 new GetCommand() { 677 @Override 678 @SuppressWarnings("unchecked") 679 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 680 return (T) metadata.getMandatoryConcurrentStreamCombinations(); 681 } 682 }); 683 684 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_TEN_BIT_OUTPUT_STREAM_COMBINATIONS.getNativeKey()685 CameraCharacteristics.SCALER_MANDATORY_TEN_BIT_OUTPUT_STREAM_COMBINATIONS.getNativeKey(), 686 new GetCommand() { 687 @Override 688 @SuppressWarnings("unchecked") 689 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 690 return (T) metadata.getMandatory10BitStreamCombinations(); 691 } 692 }); 693 694 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_MAXIMUM_RESOLUTION_STREAM_COMBINATIONS.getNativeKey()695 CameraCharacteristics.SCALER_MANDATORY_MAXIMUM_RESOLUTION_STREAM_COMBINATIONS.getNativeKey(), 696 new GetCommand() { 697 @Override 698 @SuppressWarnings("unchecked") 699 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 700 return (T) metadata.getMandatoryMaximumResolutionStreamCombinations(); 701 } 702 }); 703 704 sGetCommandMap.put( 
CameraCharacteristics.SCALER_MANDATORY_USE_CASE_STREAM_COMBINATIONS.getNativeKey()705 CameraCharacteristics.SCALER_MANDATORY_USE_CASE_STREAM_COMBINATIONS.getNativeKey(), 706 new GetCommand() { 707 @Override 708 @SuppressWarnings("unchecked") 709 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 710 return (T) metadata.getMandatoryUseCaseStreamCombinations(); 711 } 712 }); 713 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_PREVIEW_STABILIZATION_OUTPUT_STREAM_COMBINATIONS.getNativeKey()714 CameraCharacteristics.SCALER_MANDATORY_PREVIEW_STABILIZATION_OUTPUT_STREAM_COMBINATIONS.getNativeKey(), 715 new GetCommand() { 716 @Override 717 @SuppressWarnings("unchecked") 718 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 719 return (T) metadata.getMandatoryPreviewStabilizationStreamCombinations(); 720 } 721 }); 722 723 sGetCommandMap.put( CameraCharacteristics.CONTROL_MAX_REGIONS_AE.getNativeKey()724 CameraCharacteristics.CONTROL_MAX_REGIONS_AE.getNativeKey(), new GetCommand() { 725 @Override 726 @SuppressWarnings("unchecked") 727 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 728 return (T) metadata.getMaxRegions(key); 729 } 730 }); 731 sGetCommandMap.put( GetCommand()732 CameraCharacteristics.CONTROL_MAX_REGIONS_AWB.getNativeKey(), new GetCommand() { 733 @Override 734 @SuppressWarnings("unchecked") 735 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 736 return (T) metadata.getMaxRegions(key); 737 } 738 }); 739 sGetCommandMap.put( CameraCharacteristics.CONTROL_MAX_REGIONS_AF.getNativeKey()740 CameraCharacteristics.CONTROL_MAX_REGIONS_AF.getNativeKey(), new GetCommand() { 741 @Override 742 @SuppressWarnings("unchecked") 743 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 744 return (T) metadata.getMaxRegions(key); 745 } 746 }); 747 sGetCommandMap.put( GetCommand()748 CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW.getNativeKey(), new GetCommand() { 749 @Override 750 @SuppressWarnings("unchecked") 751 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 752 return (T) metadata.getMaxNumOutputs(key); 753 } 754 }); 755 sGetCommandMap.put( GetCommand()756 CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC.getNativeKey(), new GetCommand() { 757 @Override 758 @SuppressWarnings("unchecked") 759 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 760 return (T) metadata.getMaxNumOutputs(key); 761 } 762 }); 763 sGetCommandMap.put( CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING.getNativeKey()764 CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING.getNativeKey(), 765 new GetCommand() { 766 @Override 767 @SuppressWarnings("unchecked") 768 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 769 return (T) metadata.getMaxNumOutputs(key); 770 } 771 }); 772 sGetCommandMap.put( GetCommand()773 CaptureRequest.TONEMAP_CURVE.getNativeKey(), new GetCommand() { 774 @Override 775 @SuppressWarnings("unchecked") 776 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 777 return (T) metadata.getTonemapCurve(); 778 } 779 }); 780 sGetCommandMap.put( GetCommand()781 CaptureResult.JPEG_GPS_LOCATION.getNativeKey(), new GetCommand() { 782 @Override 783 @SuppressWarnings("unchecked") 784 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 785 return (T) metadata.getGpsLocation(); 786 } 787 }); 788 sGetCommandMap.put( CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.getNativeKey()789 
CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.getNativeKey(), 790 new GetCommand() { 791 @Override 792 @SuppressWarnings("unchecked") 793 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 794 return (T) metadata.getLensShadingMap(); 795 } 796 }); 797 sGetCommandMap.put( CameraCharacteristics.INFO_DEVICE_STATE_SENSOR_ORIENTATION_MAP.getNativeKey()798 CameraCharacteristics.INFO_DEVICE_STATE_SENSOR_ORIENTATION_MAP.getNativeKey(), 799 new GetCommand() { 800 @Override 801 @SuppressWarnings("unchecked") 802 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 803 return (T) metadata.getDeviceStateOrientationMap(); 804 } 805 }); 806 sGetCommandMap.put( CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES.getNativeKey()807 CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES.getNativeKey(), 808 new GetCommand() { 809 @Override 810 @SuppressWarnings("unchecked") 811 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 812 return (T) metadata.getDynamicRangeProfiles(); 813 } 814 }); 815 sGetCommandMap.put( CaptureResult.STATISTICS_OIS_SAMPLES.getNativeKey()816 CaptureResult.STATISTICS_OIS_SAMPLES.getNativeKey(), 817 new GetCommand() { 818 @Override 819 @SuppressWarnings("unchecked") 820 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 821 return (T) metadata.getOisSamples(); 822 } 823 }); 824 sGetCommandMap.put( CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES.getNativeKey()825 CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES.getNativeKey(), 826 new GetCommand() { 827 @Override 828 @SuppressWarnings("unchecked") 829 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 830 return (T) metadata.getExtendedSceneModeCapabilities(); 831 } 832 }); 833 sGetCommandMap.put( CameraCharacteristics.SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP.getNativeKey()834 CameraCharacteristics.SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP.getNativeKey(), 835 new GetCommand() { 836 @Override 837 @SuppressWarnings("unchecked") 838 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 839 return (T) metadata.getMultiResolutionStreamConfigurationMap(); 840 } 841 }); 842 } 843 getAvailableFormats()844 private int[] getAvailableFormats() { 845 int[] availableFormats = getBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS); 846 if (availableFormats != null) { 847 for (int i = 0; i < availableFormats.length; i++) { 848 // JPEG has different value between native and managed side, need override. 
849 if (availableFormats[i] == NATIVE_JPEG_FORMAT) { 850 availableFormats[i] = ImageFormat.JPEG; 851 } 852 } 853 } 854 855 return availableFormats; 856 } 857 setFaces(Face[] faces)858 private boolean setFaces(Face[] faces) { 859 if (faces == null) { 860 return false; 861 } 862 863 int numFaces = faces.length; 864 865 // Detect if all faces are SIMPLE or not; count # of valid faces 866 boolean fullMode = true; 867 for (Face face : faces) { 868 if (face == null) { 869 numFaces--; 870 Log.w(TAG, "setFaces - null face detected, skipping"); 871 continue; 872 } 873 874 if (face.getId() == Face.ID_UNSUPPORTED) { 875 fullMode = false; 876 } 877 } 878 879 Rect[] faceRectangles = new Rect[numFaces]; 880 byte[] faceScores = new byte[numFaces]; 881 int[] faceIds = null; 882 int[] faceLandmarks = null; 883 884 if (fullMode) { 885 faceIds = new int[numFaces]; 886 faceLandmarks = new int[numFaces * FACE_LANDMARK_SIZE]; 887 } 888 889 int i = 0; 890 for (Face face : faces) { 891 if (face == null) { 892 continue; 893 } 894 895 faceRectangles[i] = face.getBounds(); 896 faceScores[i] = (byte)face.getScore(); 897 898 if (fullMode) { 899 faceIds[i] = face.getId(); 900 901 int j = 0; 902 903 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().x; 904 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().y; 905 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().x; 906 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().y; 907 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().x; 908 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().y; 909 } 910 911 i++; 912 } 913 914 set(CaptureResult.STATISTICS_FACE_RECTANGLES, faceRectangles); 915 set(CaptureResult.STATISTICS_FACE_IDS, faceIds); 916 set(CaptureResult.STATISTICS_FACE_LANDMARKS, faceLandmarks); 917 set(CaptureResult.STATISTICS_FACE_SCORES, faceScores); 918 919 return true; 920 } 921 getFaces()922 private Face[] getFaces() { 923 Integer faceDetectMode = get(CaptureResult.STATISTICS_FACE_DETECT_MODE); 924 byte[] faceScores = get(CaptureResult.STATISTICS_FACE_SCORES); 925 Rect[] faceRectangles = get(CaptureResult.STATISTICS_FACE_RECTANGLES); 926 int[] faceIds = get(CaptureResult.STATISTICS_FACE_IDS); 927 int[] faceLandmarks = get(CaptureResult.STATISTICS_FACE_LANDMARKS); 928 929 if (areValuesAllNull(faceDetectMode, faceScores, faceRectangles, faceIds, faceLandmarks)) { 930 return null; 931 } 932 933 if (faceDetectMode == null) { 934 Log.w(TAG, "Face detect mode metadata is null, assuming the mode is SIMPLE"); 935 faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE; 936 } else if (faceDetectMode > CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) { 937 // Face detect mode is larger than FULL, assuming the mode is FULL 938 faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL; 939 } else { 940 if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) { 941 return new Face[0]; 942 } 943 if (faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE && 944 faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) { 945 Log.w(TAG, "Unknown face detect mode: " + faceDetectMode); 946 return new Face[0]; 947 } 948 } 949 950 // Face scores and rectangles are required by SIMPLE and FULL mode. 
951 if (faceScores == null || faceRectangles == null) { 952 Log.w(TAG, "Expect face scores and rectangles to be non-null"); 953 return new Face[0]; 954 } else if (faceScores.length != faceRectangles.length) { 955 Log.w(TAG, String.format("Face score size(%d) doesn't match face rectangle size(%d)!", 956 faceScores.length, faceRectangles.length)); 957 } 958 959 // To be safe, make the number of faces the minimum of all face info metadata lengths. 960 int numFaces = Math.min(faceScores.length, faceRectangles.length); 961 // Face id and landmarks are only required by FULL mode. 962 if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) { 963 if (faceIds == null || faceLandmarks == null) { 964 Log.w(TAG, "Expect face ids and landmarks to be non-null for FULL mode, " + 965 "falling back to SIMPLE mode"); 966 faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE; 967 } else { 968 if (faceIds.length != numFaces || 969 faceLandmarks.length != numFaces * FACE_LANDMARK_SIZE) { 970 Log.w(TAG, String.format("Face id size(%d), or face landmark size(%d) don't " + 971 "match face number(%d)!", 972 faceIds.length, faceLandmarks.length, numFaces)); 973 } 974 // To be safe, make the number of faces the minimum of all face info metadata lengths. 975 numFaces = Math.min(numFaces, faceIds.length); 976 numFaces = Math.min(numFaces, faceLandmarks.length / FACE_LANDMARK_SIZE); 977 } 978 } 979 980 ArrayList<Face> faceList = new ArrayList<Face>(); 981 if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) { 982 for (int i = 0; i < numFaces; i++) { 983 if (faceScores[i] <= Face.SCORE_MAX && 984 faceScores[i] >= Face.SCORE_MIN) { 985 faceList.add(new Face(faceRectangles[i], faceScores[i])); 986 } 987 } 988 } else { 989 // CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL 990 for (int i = 0; i < numFaces; i++) { 991 if (faceScores[i] <= Face.SCORE_MAX && 992 faceScores[i] >= Face.SCORE_MIN && 993 faceIds[i] >= 0) { 994 Point leftEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE], 995 faceLandmarks[i*FACE_LANDMARK_SIZE+1]); 996 Point rightEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+2], 997 faceLandmarks[i*FACE_LANDMARK_SIZE+3]); 998 Point mouth = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+4], 999 faceLandmarks[i*FACE_LANDMARK_SIZE+5]); 1000 Face face = new Face(faceRectangles[i], faceScores[i], faceIds[i], 1001 leftEye, rightEye, mouth); 1002 faceList.add(face); 1003 } 1004 } 1005 } 1006 Face[] faces = new Face[faceList.size()]; 1007 faceList.toArray(faces); 1008 return faces; 1009 } 1010 1011 // Face rectangles are defined as (left, top, right, bottom) instead of 1012 // (left, top, width, height) at the native level, so the normal Rect 1013 // conversion that does (l, t, w, h) -> (l, t, r, b) is unnecessary. Undo 1014 // that conversion here for just the faces. 
getFaceRectangles()1015 private Rect[] getFaceRectangles() { 1016 Rect[] faceRectangles = getBase(CaptureResult.STATISTICS_FACE_RECTANGLES); 1017 if (faceRectangles == null) return null; 1018 1019 Rect[] fixedFaceRectangles = new Rect[faceRectangles.length]; 1020 for (int i = 0; i < faceRectangles.length; i++) { 1021 fixedFaceRectangles[i] = new Rect( 1022 faceRectangles[i].left, 1023 faceRectangles[i].top, 1024 faceRectangles[i].right - faceRectangles[i].left, 1025 faceRectangles[i].bottom - faceRectangles[i].top); 1026 } 1027 return fixedFaceRectangles; 1028 } 1029 getLensShadingMap()1030 private LensShadingMap getLensShadingMap() { 1031 float[] lsmArray = getBase(CaptureResult.STATISTICS_LENS_SHADING_MAP); 1032 Size s = get(CameraCharacteristics.LENS_INFO_SHADING_MAP_SIZE); 1033 1034 // Do not warn if lsmArray is null while s is not. This is valid. 1035 if (lsmArray == null) { 1036 return null; 1037 } 1038 1039 if (s == null) { 1040 Log.w(TAG, "getLensShadingMap - Lens shading map size was null."); 1041 return null; 1042 } 1043 1044 LensShadingMap map = new LensShadingMap(lsmArray, s.getHeight(), s.getWidth()); 1045 return map; 1046 } 1047 getDeviceStateOrientationMap()1048 private DeviceStateSensorOrientationMap getDeviceStateOrientationMap() { 1049 long[] mapArray = getBase(CameraCharacteristics.INFO_DEVICE_STATE_ORIENTATIONS); 1050 1051 // Do not warn if mapArray is null. This is valid. 1052 if (mapArray == null) { 1053 return null; 1054 } 1055 1056 DeviceStateSensorOrientationMap map = new DeviceStateSensorOrientationMap(mapArray); 1057 return map; 1058 } 1059 getDynamicRangeProfiles()1060 private DynamicRangeProfiles getDynamicRangeProfiles() { 1061 long[] profileArray = getBase( 1062 CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP); 1063 1064 if (profileArray == null) { 1065 return null; 1066 } 1067 1068 return new DynamicRangeProfiles(profileArray); 1069 } 1070 getGpsLocation()1071 private Location getGpsLocation() { 1072 String processingMethod = get(CaptureResult.JPEG_GPS_PROCESSING_METHOD); 1073 double[] coords = get(CaptureResult.JPEG_GPS_COORDINATES); 1074 Long timeStamp = get(CaptureResult.JPEG_GPS_TIMESTAMP); 1075 1076 if (areValuesAllNull(processingMethod, coords, timeStamp)) { 1077 return null; 1078 } 1079 1080 Location l = new Location(translateProcessToLocationProvider(processingMethod)); 1081 if (timeStamp != null) { 1082 // Location expects the timestamp in milliseconds. 1083 l.setTime(timeStamp * 1000); 1084 } else { 1085 Log.w(TAG, "getGpsLocation - No timestamp for GPS location."); 1086 } 1087 1088 if (coords != null) { 1089 l.setLatitude(coords[0]); 1090 l.setLongitude(coords[1]); 1091 l.setAltitude(coords[2]); 1092 } else { 1093 Log.w(TAG, "getGpsLocation - No coordinates for GPS location"); 1094 } 1095 1096 return l; 1097 } 1098 setGpsLocation(Location l)1099 private boolean setGpsLocation(Location l) { 1100 if (l == null) { 1101 return false; 1102 } 1103 1104 double[] coords = { l.getLatitude(), l.getLongitude(), l.getAltitude() }; 1105 String processMethod = translateLocationProviderToProcess(l.getProvider()); 1106 // JPEG_GPS_TIMESTAMP expects seconds instead of milliseconds. 
1107 long timestamp = l.getTime() / 1000; 1108 1109 set(CaptureRequest.JPEG_GPS_TIMESTAMP, timestamp); 1110 set(CaptureRequest.JPEG_GPS_COORDINATES, coords); 1111 1112 if (processMethod == null) { 1113 Log.w(TAG, "setGpsLocation - No process method, Location is not from a GPS or NETWORK " + 1114 "provider"); 1115 } else { 1116 setBase(CaptureRequest.JPEG_GPS_PROCESSING_METHOD, processMethod); 1117 } 1118 return true; 1119 } 1120 parseRecommendedConfigurations(RecommendedStreamConfiguration[] configurations, StreamConfigurationMap fullMap, boolean isDepth, ArrayList<ArrayList<StreamConfiguration>> streamConfigList, ArrayList<ArrayList<StreamConfigurationDuration>> streamDurationList, ArrayList<ArrayList<StreamConfigurationDuration>> streamStallList, boolean[] supportsPrivate)1121 private void parseRecommendedConfigurations(RecommendedStreamConfiguration[] configurations, 1122 StreamConfigurationMap fullMap, boolean isDepth, 1123 ArrayList<ArrayList<StreamConfiguration>> /*out*/streamConfigList, 1124 ArrayList<ArrayList<StreamConfigurationDuration>> /*out*/streamDurationList, 1125 ArrayList<ArrayList<StreamConfigurationDuration>> /*out*/streamStallList, 1126 boolean[] /*out*/supportsPrivate) { 1127 1128 streamConfigList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 1129 streamDurationList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 1130 streamStallList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 1131 for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) { 1132 streamConfigList.add(new ArrayList<StreamConfiguration> ()); 1133 streamDurationList.add(new ArrayList<StreamConfigurationDuration> ()); 1134 streamStallList.add(new ArrayList<StreamConfigurationDuration> ()); 1135 } 1136 1137 for (RecommendedStreamConfiguration c : configurations) { 1138 int width = c.getWidth(); 1139 int height = c.getHeight(); 1140 int internalFormat = c.getFormat(); 1141 int publicFormat = 1142 (isDepth) ? 
StreamConfigurationMap.depthFormatToPublic(internalFormat) : 1143 StreamConfigurationMap.imageFormatToPublic(internalFormat); 1144 Size sz = new Size(width, height); 1145 int usecaseBitmap = c.getUsecaseBitmap(); 1146 1147 if (!c.isInput()) { 1148 StreamConfigurationDuration minDurationConfiguration = null; 1149 StreamConfigurationDuration stallDurationConfiguration = null; 1150 1151 StreamConfiguration streamConfiguration = new StreamConfiguration(internalFormat, 1152 width, height, /*input*/ false); 1153 1154 long minFrameDuration = fullMap.getOutputMinFrameDuration(publicFormat, sz); 1155 if (minFrameDuration > 0) { 1156 minDurationConfiguration = new StreamConfigurationDuration(internalFormat, 1157 width, height, minFrameDuration); 1158 } 1159 1160 long stallDuration = fullMap.getOutputStallDuration(publicFormat, sz); 1161 if (stallDuration > 0) { 1162 stallDurationConfiguration = new StreamConfigurationDuration(internalFormat, 1163 width, height, stallDuration); 1164 } 1165 1166 for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) { 1167 if ((usecaseBitmap & (1 << i)) != 0) { 1168 ArrayList<StreamConfiguration> sc = streamConfigList.get(i); 1169 sc.add(streamConfiguration); 1170 1171 if (minFrameDuration > 0) { 1172 ArrayList<StreamConfigurationDuration> scd = streamDurationList.get(i); 1173 scd.add(minDurationConfiguration); 1174 } 1175 1176 if (stallDuration > 0) { 1177 ArrayList<StreamConfigurationDuration> scs = streamStallList.get(i); 1178 scs.add(stallDurationConfiguration); 1179 } 1180 1181 if ((supportsPrivate != null) && !supportsPrivate[i] && 1182 (publicFormat == ImageFormat.PRIVATE)) { 1183 supportsPrivate[i] = true; 1184 } 1185 } 1186 } 1187 } else { 1188 if (usecaseBitmap != (1 << RecommendedStreamConfigurationMap.USECASE_ZSL)) { 1189 throw new IllegalArgumentException("Recommended input stream configurations " + 1190 "should only be advertised in the ZSL use case!"); 1191 } 1192 1193 ArrayList<StreamConfiguration> sc = streamConfigList.get( 1194 RecommendedStreamConfigurationMap.USECASE_ZSL); 1195 sc.add(new StreamConfiguration(internalFormat, 1196 width, height, /*input*/ true)); 1197 } 1198 } 1199 } 1200 1201 private class StreamConfigurationData { 1202 StreamConfiguration [] streamConfigurationArray = null; 1203 StreamConfigurationDuration [] minDurationArray = null; 1204 StreamConfigurationDuration [] stallDurationArray = null; 1205 } 1206 initializeStreamConfigurationData(ArrayList<StreamConfiguration> sc, ArrayList<StreamConfigurationDuration> scd, ArrayList<StreamConfigurationDuration> scs, StreamConfigurationData scData)1207 public void initializeStreamConfigurationData(ArrayList<StreamConfiguration> sc, 1208 ArrayList<StreamConfigurationDuration> scd, ArrayList<StreamConfigurationDuration> scs, 1209 StreamConfigurationData /*out*/scData) { 1210 if ((scData == null) || (sc == null)) { 1211 return; 1212 } 1213 1214 scData.streamConfigurationArray = new StreamConfiguration[sc.size()]; 1215 scData.streamConfigurationArray = sc.toArray(scData.streamConfigurationArray); 1216 1217 if ((scd != null) && !scd.isEmpty()) { 1218 scData.minDurationArray = new StreamConfigurationDuration[scd.size()]; 1219 scData.minDurationArray = scd.toArray(scData.minDurationArray); 1220 } else { 1221 scData.minDurationArray = new StreamConfigurationDuration[0]; 1222 } 1223 1224 if ((scs != null) && !scs.isEmpty()) { 1225 scData.stallDurationArray = new StreamConfigurationDuration[scs.size()]; 1226 scData.stallDurationArray = scs.toArray(scData.stallDurationArray); 
1227 } else { 1228 scData.stallDurationArray = new StreamConfigurationDuration[0]; 1229 } 1230 } 1231 1232 /** 1233 * Retrieve the list of recommended stream configurations. 1234 * 1235 * @return A list of recommended stream configuration maps for each common use case or null 1236 * in case the recommended stream configurations are invalid or incomplete. 1237 * @hide 1238 */ getRecommendedStreamConfigurations()1239 public ArrayList<RecommendedStreamConfigurationMap> getRecommendedStreamConfigurations() { 1240 RecommendedStreamConfiguration[] configurations = getBase( 1241 CameraCharacteristics.SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS); 1242 RecommendedStreamConfiguration[] depthConfigurations = getBase( 1243 CameraCharacteristics.DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS); 1244 if ((configurations == null) && (depthConfigurations == null)) { 1245 return null; 1246 } 1247 1248 StreamConfigurationMap fullMap = getStreamConfigurationMap(); 1249 ArrayList<RecommendedStreamConfigurationMap> recommendedConfigurations = 1250 new ArrayList<RecommendedStreamConfigurationMap> (); 1251 1252 ArrayList<ArrayList<StreamConfiguration>> streamConfigList = 1253 new ArrayList<ArrayList<StreamConfiguration>>(); 1254 ArrayList<ArrayList<StreamConfigurationDuration>> streamDurationList = 1255 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1256 ArrayList<ArrayList<StreamConfigurationDuration>> streamStallList = 1257 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1258 boolean[] supportsPrivate = 1259 new boolean[RecommendedStreamConfigurationMap.MAX_USECASE_COUNT]; 1260 try { 1261 if (configurations != null) { 1262 parseRecommendedConfigurations(configurations, fullMap, /*isDepth*/ false, 1263 streamConfigList, streamDurationList, streamStallList, supportsPrivate); 1264 } 1265 } catch (IllegalArgumentException e) { 1266 Log.e(TAG, "Failed parsing the recommended stream configurations!"); 1267 return null; 1268 } 1269 1270 ArrayList<ArrayList<StreamConfiguration>> depthStreamConfigList = 1271 new ArrayList<ArrayList<StreamConfiguration>>(); 1272 ArrayList<ArrayList<StreamConfigurationDuration>> depthStreamDurationList = 1273 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1274 ArrayList<ArrayList<StreamConfigurationDuration>> depthStreamStallList = 1275 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1276 if (depthConfigurations != null) { 1277 try { 1278 parseRecommendedConfigurations(depthConfigurations, fullMap, /*isDepth*/ true, 1279 depthStreamConfigList, depthStreamDurationList, depthStreamStallList, 1280 /*supportsPrivate*/ null); 1281 } catch (IllegalArgumentException e) { 1282 Log.e(TAG, "Failed parsing the recommended depth stream configurations!"); 1283 return null; 1284 } 1285 } 1286 1287 ReprocessFormatsMap inputOutputFormatsMap = getBase( 1288 CameraCharacteristics.SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP); 1289 HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase( 1290 CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS); 1291 boolean listHighResolution = isBurstSupported(); 1292 recommendedConfigurations.ensureCapacity( 1293 RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 1294 for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) { 1295 StreamConfigurationData scData = new StreamConfigurationData(); 1296 if (configurations != null) { 1297 initializeStreamConfigurationData(streamConfigList.get(i), 1298 streamDurationList.get(i), streamStallList.get(i), 
scData); 1299 } 1300 1301 StreamConfigurationData depthScData = new StreamConfigurationData(); 1302 if (depthConfigurations != null) { 1303 initializeStreamConfigurationData(depthStreamConfigList.get(i), 1304 depthStreamDurationList.get(i), depthStreamStallList.get(i), depthScData); 1305 } 1306 1307 if ((scData.streamConfigurationArray == null || 1308 scData.streamConfigurationArray.length == 0) && 1309 (depthScData.streamConfigurationArray == null || 1310 depthScData.streamConfigurationArray.length == 0)) { 1311 recommendedConfigurations.add(null); 1312 continue; 1313 } 1314 1315 // Dynamic depth streams involve a lot of SW processing and currently cannot be 1316 // recommended. 1317 StreamConfigurationMap map = null; 1318 switch (i) { 1319 case RecommendedStreamConfigurationMap.USECASE_PREVIEW: 1320 case RecommendedStreamConfigurationMap.USECASE_RAW: 1321 case RecommendedStreamConfigurationMap.USECASE_LOW_LATENCY_SNAPSHOT: 1322 case RecommendedStreamConfigurationMap.USECASE_VIDEO_SNAPSHOT: 1323 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1324 scData.minDurationArray, scData.stallDurationArray, 1325 /*depthconfiguration*/ null, /*depthminduration*/ null, 1326 /*depthstallduration*/ null, 1327 /*dynamicDepthConfigurations*/ null, 1328 /*dynamicDepthMinFrameDurations*/ null, 1329 /*dynamicDepthStallDurations*/ null, 1330 /*heicconfiguration*/ null, 1331 /*heicminduration*/ null, 1332 /*heicstallduration*/ null, 1333 /*highspeedvideoconfigurations*/ null, 1334 /*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]); 1335 break; 1336 case RecommendedStreamConfigurationMap.USECASE_RECORD: 1337 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1338 scData.minDurationArray, scData.stallDurationArray, 1339 /*depthconfiguration*/ null, /*depthminduration*/ null, 1340 /*depthstallduration*/ null, 1341 /*dynamicDepthConfigurations*/ null, 1342 /*dynamicDepthMinFrameDurations*/ null, 1343 /*dynamicDepthStallDurations*/ null, 1344 /*heicconfiguration*/ null, 1345 /*heicminduration*/ null, 1346 /*heicstallduration*/ null, 1347 highSpeedVideoConfigurations, 1348 /*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]); 1349 break; 1350 case RecommendedStreamConfigurationMap.USECASE_ZSL: 1351 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1352 scData.minDurationArray, scData.stallDurationArray, 1353 depthScData.streamConfigurationArray, depthScData.minDurationArray, 1354 depthScData.stallDurationArray, 1355 /*dynamicDepthConfigurations*/ null, 1356 /*dynamicDepthMinFrameDurations*/ null, 1357 /*dynamicDepthStallDurations*/ null, 1358 /*heicconfiguration*/ null, 1359 /*heicminduration*/ null, 1360 /*heicstallduration*/ null, 1361 /*highSpeedVideoConfigurations*/ null, 1362 inputOutputFormatsMap, listHighResolution, supportsPrivate[i]); 1363 break; 1364 default: 1365 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1366 scData.minDurationArray, scData.stallDurationArray, 1367 depthScData.streamConfigurationArray, depthScData.minDurationArray, 1368 depthScData.stallDurationArray, 1369 /*dynamicDepthConfigurations*/ null, 1370 /*dynamicDepthMinFrameDurations*/ null, 1371 /*dynamicDepthStallDurations*/ null, 1372 /*heicconfiguration*/ null, 1373 /*heicminduration*/ null, 1374 /*heicstallduration*/ null, 1375 /*highSpeedVideoConfigurations*/ null, 1376 /*inputOutputFormatsMap*/ null, listHighResolution, supportsPrivate[i]); 1377 } 1378 1379 recommendedConfigurations.add(new 
RecommendedStreamConfigurationMap(map, /*usecase*/i, 1380 supportsPrivate[i])); 1381 } 1382 1383 return recommendedConfigurations; 1384 } 1385 isCapabilitySupported(int capabilityRequested)1386 private boolean isCapabilitySupported(int capabilityRequested) { 1387 boolean ret = false; 1388 1389 int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 1390 for (int capability : capabilities) { 1391 if (capabilityRequested == capability) { 1392 ret = true; 1393 break; 1394 } 1395 } 1396 1397 return ret; 1398 } 1399 1400 /** 1401 * @hide 1402 */ isUltraHighResolutionSensor()1403 public boolean isUltraHighResolutionSensor() { 1404 return isCapabilitySupported( 1405 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR); 1406 1407 } isBurstSupported()1408 private boolean isBurstSupported() { 1409 return isCapabilitySupported( 1410 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE); 1411 } 1412 isPreviewStabilizationSupported()1413 private boolean isPreviewStabilizationSupported() { 1414 boolean ret = false; 1415 1416 int[] videoStabilizationModes = 1417 getBase(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES); 1418 if (videoStabilizationModes == null) { 1419 return false; 1420 } 1421 for (int mode : videoStabilizationModes) { 1422 if (mode == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION) { 1423 ret = true; 1424 break; 1425 } 1426 } 1427 1428 return ret; 1429 } 1430 getMandatoryStreamCombinationsHelper( int mandatoryStreamsType)1431 private MandatoryStreamCombination[] getMandatoryStreamCombinationsHelper( 1432 int mandatoryStreamsType) { 1433 int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 1434 ArrayList<Integer> caps = new ArrayList<Integer>(); 1435 caps.ensureCapacity(capabilities.length); 1436 for (int c : capabilities) { 1437 caps.add(new Integer(c)); 1438 } 1439 int hwLevel = getBase(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); 1440 MandatoryStreamCombination.Builder build = new MandatoryStreamCombination.Builder( 1441 mCameraId, hwLevel, mDisplaySize, caps, getStreamConfigurationMap(), 1442 getStreamConfigurationMapMaximumResolution(), isPreviewStabilizationSupported()); 1443 1444 List<MandatoryStreamCombination> combs = null; 1445 switch (mandatoryStreamsType) { 1446 case MANDATORY_STREAM_CONFIGURATIONS_CONCURRENT: 1447 combs = build.getAvailableMandatoryConcurrentStreamCombinations(); 1448 break; 1449 case MANDATORY_STREAM_CONFIGURATIONS_MAX_RESOLUTION: 1450 combs = build.getAvailableMandatoryMaximumResolutionStreamCombinations(); 1451 break; 1452 case MANDATORY_STREAM_CONFIGURATIONS_10BIT: 1453 combs = build.getAvailableMandatory10BitStreamCombinations(); 1454 break; 1455 case MANDATORY_STREAM_CONFIGURATIONS_USE_CASE: 1456 combs = build.getAvailableMandatoryStreamUseCaseCombinations(); 1457 break; 1458 case MANDATORY_STREAM_CONFIGURATIONS_PREVIEW_STABILIZATION: 1459 combs = build.getAvailableMandatoryPreviewStabilizedStreamCombinations(); 1460 break; 1461 default: 1462 combs = build.getAvailableMandatoryStreamCombinations(); 1463 } 1464 if ((combs != null) && (!combs.isEmpty())) { 1465 MandatoryStreamCombination[] combArray = new MandatoryStreamCombination[combs.size()]; 1466 combArray = combs.toArray(combArray); 1467 return combArray; 1468 } 1469 return null; 1470 } 1471 getMandatory10BitStreamCombinations()1472 private MandatoryStreamCombination[] getMandatory10BitStreamCombinations() { 1473 return 
getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_10BIT); 1474 } 1475 getMandatoryConcurrentStreamCombinations()1476 private MandatoryStreamCombination[] getMandatoryConcurrentStreamCombinations() { 1477 if (!mHasMandatoryConcurrentStreams) { 1478 return null; 1479 } 1480 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_CONCURRENT); 1481 } 1482 getMandatoryMaximumResolutionStreamCombinations()1483 private MandatoryStreamCombination[] getMandatoryMaximumResolutionStreamCombinations() { 1484 if (!isUltraHighResolutionSensor()) { 1485 return null; 1486 } 1487 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_MAX_RESOLUTION); 1488 } 1489 getMandatoryStreamCombinations()1490 private MandatoryStreamCombination[] getMandatoryStreamCombinations() { 1491 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_DEFAULT); 1492 } 1493 getMandatoryUseCaseStreamCombinations()1494 private MandatoryStreamCombination[] getMandatoryUseCaseStreamCombinations() { 1495 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_USE_CASE); 1496 } 1497 getMandatoryPreviewStabilizationStreamCombinations()1498 private MandatoryStreamCombination[] getMandatoryPreviewStabilizationStreamCombinations() { 1499 return getMandatoryStreamCombinationsHelper( 1500 MANDATORY_STREAM_CONFIGURATIONS_PREVIEW_STABILIZATION); 1501 } 1502 getStreamConfigurationMap()1503 private StreamConfigurationMap getStreamConfigurationMap() { 1504 StreamConfiguration[] configurations = getBase( 1505 CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS); 1506 StreamConfigurationDuration[] minFrameDurations = getBase( 1507 CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS); 1508 StreamConfigurationDuration[] stallDurations = getBase( 1509 CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS); 1510 StreamConfiguration[] depthConfigurations = getBase( 1511 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS); 1512 StreamConfigurationDuration[] depthMinFrameDurations = getBase( 1513 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS); 1514 StreamConfigurationDuration[] depthStallDurations = getBase( 1515 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS); 1516 StreamConfiguration[] dynamicDepthConfigurations = getBase( 1517 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS); 1518 StreamConfigurationDuration[] dynamicDepthMinFrameDurations = getBase( 1519 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS); 1520 StreamConfigurationDuration[] dynamicDepthStallDurations = getBase( 1521 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS); 1522 StreamConfiguration[] heicConfigurations = getBase( 1523 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS); 1524 StreamConfigurationDuration[] heicMinFrameDurations = getBase( 1525 CameraCharacteristics.HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS); 1526 StreamConfigurationDuration[] heicStallDurations = getBase( 1527 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STALL_DURATIONS); 1528 HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase( 1529 CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS); 1530 ReprocessFormatsMap inputOutputFormatsMap = getBase( 1531 CameraCharacteristics.SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP); 1532 boolean listHighResolution = isBurstSupported(); 1533 return new StreamConfigurationMap( 1534 configurations, 
minFrameDurations, stallDurations, 1535 depthConfigurations, depthMinFrameDurations, depthStallDurations, 1536 dynamicDepthConfigurations, dynamicDepthMinFrameDurations, 1537 dynamicDepthStallDurations, heicConfigurations, 1538 heicMinFrameDurations, heicStallDurations, 1539 highSpeedVideoConfigurations, inputOutputFormatsMap, 1540 listHighResolution); 1541 } 1542 getStreamConfigurationMapMaximumResolution()1543 private StreamConfigurationMap getStreamConfigurationMapMaximumResolution() { 1544 if (!isUltraHighResolutionSensor()) { 1545 return null; 1546 } 1547 StreamConfiguration[] configurations = getBase( 1548 CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1549 StreamConfigurationDuration[] minFrameDurations = getBase( 1550 CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1551 StreamConfigurationDuration[] stallDurations = getBase( 1552 CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1553 StreamConfiguration[] depthConfigurations = getBase( 1554 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1555 StreamConfigurationDuration[] depthMinFrameDurations = getBase( 1556 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1557 StreamConfigurationDuration[] depthStallDurations = getBase( 1558 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1559 StreamConfiguration[] dynamicDepthConfigurations = getBase( 1560 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1561 StreamConfigurationDuration[] dynamicDepthMinFrameDurations = getBase( 1562 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1563 StreamConfigurationDuration[] dynamicDepthStallDurations = getBase( 1564 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1565 StreamConfiguration[] heicConfigurations = getBase( 1566 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1567 StreamConfigurationDuration[] heicMinFrameDurations = getBase( 1568 CameraCharacteristics.HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1569 StreamConfigurationDuration[] heicStallDurations = getBase( 1570 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1571 HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase( 1572 CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1573 ReprocessFormatsMap inputOutputFormatsMap = getBase( 1574 CameraCharacteristics.SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION); 1575 // TODO: Is this correct, burst capability shouldn't necessarily correspond to max res mode 1576 boolean listHighResolution = isBurstSupported(); 1577 return new StreamConfigurationMap( 1578 configurations, minFrameDurations, stallDurations, 1579 depthConfigurations, depthMinFrameDurations, depthStallDurations, 1580 dynamicDepthConfigurations, dynamicDepthMinFrameDurations, 1581 dynamicDepthStallDurations, heicConfigurations, 1582 heicMinFrameDurations, heicStallDurations, 1583 highSpeedVideoConfigurations, inputOutputFormatsMap, 1584 listHighResolution, false); 1585 } 1586 getMaxRegions(Key<T> key)1587 private <T> Integer getMaxRegions(Key<T> key) { 1588 final int AE = 0; 1589 final int AWB = 1; 1590 final int AF = 2; 1591 1592 // The order of the elements is: (AE, AWB, AF) 1593 int[] maxRegions = 
getBase(CameraCharacteristics.CONTROL_MAX_REGIONS); 1594 1595 if (maxRegions == null) { 1596 return null; 1597 } 1598 1599 if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AE)) { 1600 return maxRegions[AE]; 1601 } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB)) { 1602 return maxRegions[AWB]; 1603 } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AF)) { 1604 return maxRegions[AF]; 1605 } else { 1606 throw new AssertionError("Invalid key " + key); 1607 } 1608 } 1609 getMaxNumOutputs(Key<T> key)1610 private <T> Integer getMaxNumOutputs(Key<T> key) { 1611 final int RAW = 0; 1612 final int PROC = 1; 1613 final int PROC_STALLING = 2; 1614 1615 // The order of the elements is: (raw, proc+nonstalling, proc+stalling) 1616 int[] maxNumOutputs = getBase(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS); 1617 1618 if (maxNumOutputs == null) { 1619 return null; 1620 } 1621 1622 if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW)) { 1623 return maxNumOutputs[RAW]; 1624 } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC)) { 1625 return maxNumOutputs[PROC]; 1626 } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING)) { 1627 return maxNumOutputs[PROC_STALLING]; 1628 } else { 1629 throw new AssertionError("Invalid key " + key); 1630 } 1631 } 1632 getTonemapCurve()1633 private <T> TonemapCurve getTonemapCurve() { 1634 float[] red = getBase(CaptureRequest.TONEMAP_CURVE_RED); 1635 float[] green = getBase(CaptureRequest.TONEMAP_CURVE_GREEN); 1636 float[] blue = getBase(CaptureRequest.TONEMAP_CURVE_BLUE); 1637 1638 if (areValuesAllNull(red, green, blue)) { 1639 return null; 1640 } 1641 1642 if (red == null || green == null || blue == null) { 1643 Log.w(TAG, "getTonemapCurve - missing tone curve components"); 1644 return null; 1645 } 1646 TonemapCurve tc = new TonemapCurve(red, green, blue); 1647 return tc; 1648 } 1649 getOisSamples()1650 private OisSample[] getOisSamples() { 1651 long[] timestamps = getBase(CaptureResult.STATISTICS_OIS_TIMESTAMPS); 1652 float[] xShifts = getBase(CaptureResult.STATISTICS_OIS_X_SHIFTS); 1653 float[] yShifts = getBase(CaptureResult.STATISTICS_OIS_Y_SHIFTS); 1654 1655 if (timestamps == null) { 1656 if (xShifts != null) { 1657 throw new AssertionError("timestamps is null but xShifts is not"); 1658 } 1659 1660 if (yShifts != null) { 1661 throw new AssertionError("timestamps is null but yShifts is not"); 1662 } 1663 1664 return null; 1665 } 1666 1667 if (xShifts == null) { 1668 throw new AssertionError("timestamps is not null but xShifts is"); 1669 } 1670 1671 if (yShifts == null) { 1672 throw new AssertionError("timestamps is not null but yShifts is"); 1673 } 1674 1675 if (xShifts.length != timestamps.length) { 1676 throw new AssertionError(String.format( 1677 "timestamps has %d entries but xShifts has %d", timestamps.length, 1678 xShifts.length)); 1679 } 1680 1681 if (yShifts.length != timestamps.length) { 1682 throw new AssertionError(String.format( 1683 "timestamps has %d entries but yShifts has %d", timestamps.length, 1684 yShifts.length)); 1685 } 1686 1687 OisSample[] samples = new OisSample[timestamps.length]; 1688 for (int i = 0; i < timestamps.length; i++) { 1689 samples[i] = new OisSample(timestamps[i], xShifts[i], yShifts[i]); 1690 } 1691 return samples; 1692 } 1693 getExtendedSceneModeCapabilities()1694 private Capability[] getExtendedSceneModeCapabilities() { 1695 int[] maxSizes = 1696 getBase(CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES); 
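// Layout note (derived from the checks below): maxSizes is a flat array of (mode, width, height)
// triples, one per extended scene mode, and the zoom-ratio-ranges key fetched next is a flat array
// of (minZoom, maxZoom) pairs covering every mode except DISABLED, which instead falls back to
// CONTROL_ZOOM_RATIO_RANGE or [1.0, SCALER_AVAILABLE_MAX_DIGITAL_ZOOM].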
1697 float[] zoomRanges = getBase( 1698 CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES); 1699 Range<Float> zoomRange = getBase(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE); 1700 float maxDigitalZoom = getBase(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM); 1701 1702 if (maxSizes == null) { 1703 return null; 1704 } 1705 if (maxSizes.length % 3 != 0) { 1706 throw new AssertionError("availableExtendedSceneModeMaxSizes must be tuples of " 1707 + "[mode, width, height]"); 1708 } 1709 int numExtendedSceneModes = maxSizes.length / 3; 1710 int numExtendedSceneModeZoomRanges = 0; 1711 if (zoomRanges != null) { 1712 if (zoomRanges.length % 2 != 0) { 1713 throw new AssertionError("availableExtendedSceneModeZoomRanges must be tuples of " 1714 + "[minZoom, maxZoom]"); 1715 } 1716 numExtendedSceneModeZoomRanges = zoomRanges.length / 2; 1717 if (numExtendedSceneModes - numExtendedSceneModeZoomRanges != 1) { 1718 throw new AssertionError("Number of extended scene mode zoom ranges must be 1 " 1719 + "less than number of supported modes"); 1720 } 1721 } 1722 1723 float modeOffMinZoomRatio = 1.0f; 1724 float modeOffMaxZoomRatio = maxDigitalZoom; 1725 if (zoomRange != null) { 1726 modeOffMinZoomRatio = zoomRange.getLower(); 1727 modeOffMaxZoomRatio = zoomRange.getUpper(); 1728 } 1729 1730 Capability[] capabilities = new Capability[numExtendedSceneModes]; 1731 for (int i = 0, j = 0; i < numExtendedSceneModes; i++) { 1732 int mode = maxSizes[3 * i]; 1733 int width = maxSizes[3 * i + 1]; 1734 int height = maxSizes[3 * i + 2]; 1735 if (mode != CameraMetadata.CONTROL_EXTENDED_SCENE_MODE_DISABLED 1736 && j < numExtendedSceneModeZoomRanges) { 1737 capabilities[i] = new Capability(mode, width, height, zoomRanges[2 * j], 1738 zoomRanges[2 * j + 1]); 1739 j++; 1740 } else { 1741 capabilities[i] = new Capability(mode, width, height, modeOffMinZoomRatio, 1742 modeOffMaxZoomRatio); 1743 } 1744 } 1745 1746 return capabilities; 1747 } 1748 setBase(CameraCharacteristics.Key<T> key, T value)1749 private <T> void setBase(CameraCharacteristics.Key<T> key, T value) { 1750 setBase(key.getNativeKey(), value); 1751 } 1752 setBase(CaptureResult.Key<T> key, T value)1753 private <T> void setBase(CaptureResult.Key<T> key, T value) { 1754 setBase(key.getNativeKey(), value); 1755 } 1756 setBase(CaptureRequest.Key<T> key, T value)1757 private <T> void setBase(CaptureRequest.Key<T> key, T value) { 1758 setBase(key.getNativeKey(), value); 1759 } 1760 setBase(Key<T> key, T value)1761 private <T> void setBase(Key<T> key, T value) { 1762 int tag; 1763 if (key.hasTag()) { 1764 tag = key.getTag(); 1765 } else { 1766 tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.getName()); 1767 key.cacheTag(tag); 1768 } 1769 if (value == null) { 1770 // Erase the entry 1771 writeValues(tag, /*src*/null); 1772 return; 1773 } // else update the entry to a new value 1774 1775 int nativeType = nativeGetTypeFromTagLocal(mMetadataPtr, tag); 1776 Marshaler<T> marshaler = getMarshalerForKey(key, nativeType); 1777 int size = marshaler.calculateMarshalSize(value); 1778 1779 // TODO: Optimization. Cache the byte[] and reuse if the size is big enough. 1780 byte[] values = new byte[size]; 1781 1782 ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder()); 1783 marshaler.marshal(value, buffer); 1784 1785 writeValues(tag, values); 1786 } 1787 1788 // Use Command pattern here to avoid lots of expensive if/equals checks in get for overridden 1789 // metadata. 
1790 private static final HashMap<Key<?>, SetCommand> sSetCommandMap = 1791 new HashMap<Key<?>, SetCommand>(); 1792 static { CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey()1793 sSetCommandMap.put(CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey(), 1794 new SetCommand() { 1795 @Override 1796 public <T> void setValue(CameraMetadataNative metadata, T value) { 1797 metadata.setAvailableFormats((int[]) value); 1798 } 1799 }); CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey()1800 sSetCommandMap.put(CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey(), 1801 new SetCommand() { 1802 @Override 1803 public <T> void setValue(CameraMetadataNative metadata, T value) { 1804 metadata.setFaceRectangles((Rect[]) value); 1805 } 1806 }); CaptureResult.STATISTICS_FACES.getNativeKey()1807 sSetCommandMap.put(CaptureResult.STATISTICS_FACES.getNativeKey(), 1808 new SetCommand() { 1809 @Override 1810 public <T> void setValue(CameraMetadataNative metadata, T value) { 1811 metadata.setFaces((Face[])value); 1812 } 1813 }); CaptureRequest.TONEMAP_CURVE.getNativeKey()1814 sSetCommandMap.put(CaptureRequest.TONEMAP_CURVE.getNativeKey(), new SetCommand() { 1815 @Override 1816 public <T> void setValue(CameraMetadataNative metadata, T value) { 1817 metadata.setTonemapCurve((TonemapCurve) value); 1818 } 1819 }); CaptureResult.JPEG_GPS_LOCATION.getNativeKey()1820 sSetCommandMap.put(CaptureResult.JPEG_GPS_LOCATION.getNativeKey(), new SetCommand() { 1821 @Override 1822 public <T> void setValue(CameraMetadataNative metadata, T value) { 1823 metadata.setGpsLocation((Location) value); 1824 } 1825 }); CaptureRequest.SCALER_CROP_REGION.getNativeKey()1826 sSetCommandMap.put(CaptureRequest.SCALER_CROP_REGION.getNativeKey(), 1827 new SetCommand() { 1828 @Override 1829 public <T> void setValue(CameraMetadataNative metadata, T value) { 1830 metadata.setScalerCropRegion((Rect) value); 1831 } 1832 }); CaptureRequest.CONTROL_AWB_REGIONS.getNativeKey()1833 sSetCommandMap.put(CaptureRequest.CONTROL_AWB_REGIONS.getNativeKey(), 1834 new SetCommand() { 1835 @Override 1836 public <T> void setValue(CameraMetadataNative metadata, T value) { 1837 metadata.setAWBRegions(value); 1838 } 1839 }); CaptureRequest.CONTROL_AF_REGIONS.getNativeKey()1840 sSetCommandMap.put(CaptureRequest.CONTROL_AF_REGIONS.getNativeKey(), 1841 new SetCommand() { 1842 @Override 1843 public <T> void setValue(CameraMetadataNative metadata, T value) { 1844 metadata.setAFRegions(value); 1845 } 1846 }); CaptureRequest.CONTROL_AE_REGIONS.getNativeKey()1847 sSetCommandMap.put(CaptureRequest.CONTROL_AE_REGIONS.getNativeKey(), 1848 new SetCommand() { 1849 @Override 1850 public <T> void setValue(CameraMetadataNative metadata, T value) { 1851 metadata.setAERegions(value); 1852 } 1853 }); 1854 } 1855 setAvailableFormats(int[] value)1856 private boolean setAvailableFormats(int[] value) { 1857 int[] availableFormat = value; 1858 if (value == null) { 1859 // Let setBase() to handle the null value case. 1860 return false; 1861 } 1862 1863 int[] newValues = new int[availableFormat.length]; 1864 for (int i = 0; i < availableFormat.length; i++) { 1865 newValues[i] = availableFormat[i]; 1866 if (availableFormat[i] == ImageFormat.JPEG) { 1867 newValues[i] = NATIVE_JPEG_FORMAT; 1868 } 1869 } 1870 1871 setBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS, newValues); 1872 return true; 1873 } 1874 1875 /** 1876 * Convert Face Rectangles from managed side to native side as they have different definitions. 
1877 * <p> 1878 * Managed side face rectangles are defined as: left, top, width, height. 1879 * Native side face rectangles are defined as: left, top, right, bottom. 1880 * The input face rectangles need to be converted to the native side definition when set is called. 1881 * </p> 1882 * 1883 * @param faceRects Input face rectangles. 1884 * @return true if face rectangles can be set successfully. Otherwise, let the caller 1885 * (setBase) handle it appropriately. 1886 */ setFaceRectangles(Rect[] faceRects)1887 private boolean setFaceRectangles(Rect[] faceRects) { 1888 if (faceRects == null) { 1889 return false; 1890 } 1891 1892 Rect[] newFaceRects = new Rect[faceRects.length]; 1893 for (int i = 0; i < newFaceRects.length; i++) { 1894 newFaceRects[i] = new Rect( 1895 faceRects[i].left, 1896 faceRects[i].top, 1897 faceRects[i].right + faceRects[i].left, 1898 faceRects[i].bottom + faceRects[i].top); 1899 } 1900 1901 setBase(CaptureResult.STATISTICS_FACE_RECTANGLES, newFaceRects); 1902 return true; 1903 } 1904 setTonemapCurve(TonemapCurve tc)1905 private <T> boolean setTonemapCurve(TonemapCurve tc) { 1906 if (tc == null) { 1907 return false; 1908 } 1909 1910 float[][] curve = new float[3][]; 1911 for (int i = TonemapCurve.CHANNEL_RED; i <= TonemapCurve.CHANNEL_BLUE; i++) { 1912 int pointCount = tc.getPointCount(i); 1913 curve[i] = new float[pointCount * TonemapCurve.POINT_SIZE]; 1914 tc.copyColorCurve(i, curve[i], 0); 1915 } 1916 setBase(CaptureRequest.TONEMAP_CURVE_RED, curve[0]); 1917 setBase(CaptureRequest.TONEMAP_CURVE_GREEN, curve[1]); 1918 setBase(CaptureRequest.TONEMAP_CURVE_BLUE, curve[2]); 1919 1920 return true; 1921 } 1922 setScalerCropRegion(Rect cropRegion)1923 private <T> boolean setScalerCropRegion(Rect cropRegion) { 1924 if (cropRegion == null) { 1925 return false; 1926 } 1927 setBase(CaptureRequest.SCALER_CROP_REGION_SET, true); 1928 setBase(CaptureRequest.SCALER_CROP_REGION, cropRegion); 1929 return true; 1930 } 1931 setAFRegions(T afRegions)1932 private <T> boolean setAFRegions(T afRegions) { 1933 if (afRegions == null) { 1934 return false; 1935 } 1936 setBase(CaptureRequest.CONTROL_AF_REGIONS_SET, true); 1937 // The cast to CaptureRequest.Key is needed since Java does not support template 1938 // specialization and we need to route this method to 1939 // setBase(CaptureRequest.Key<T> key, T value) 1940 setBase((CaptureRequest.Key)CaptureRequest.CONTROL_AF_REGIONS, afRegions); 1941 return true; 1942 } 1943 setAERegions(T aeRegions)1944 private <T> boolean setAERegions(T aeRegions) { 1945 if (aeRegions == null) { 1946 return false; 1947 } 1948 setBase(CaptureRequest.CONTROL_AE_REGIONS_SET, true); 1949 setBase((CaptureRequest.Key)CaptureRequest.CONTROL_AE_REGIONS, aeRegions); 1950 return true; 1951 } 1952 setAWBRegions(T awbRegions)1953 private <T> boolean setAWBRegions(T awbRegions) { 1954 if (awbRegions == null) { 1955 return false; 1956 } 1957 setBase(CaptureRequest.CONTROL_AWB_REGIONS_SET, true); 1958 setBase((CaptureRequest.Key)CaptureRequest.CONTROL_AWB_REGIONS, awbRegions); 1959 return true; 1960 } 1961 updateNativeAllocation()1962 private void updateNativeAllocation() { 1963 long currentBufferSize = nativeGetBufferSize(mMetadataPtr); 1964 1965 if (currentBufferSize != mBufferSize) { 1966 if (mBufferSize > 0) { 1967 VMRuntime.getRuntime().registerNativeFree(mBufferSize); 1968 } 1969 1970 mBufferSize = currentBufferSize; 1971 1972 if (mBufferSize > 0) { 1973 VMRuntime.getRuntime().registerNativeAllocation(mBufferSize); 1974 } 1975 } 1976 } 1977 1978 private int mCameraId = 
-1; 1979 private boolean mHasMandatoryConcurrentStreams = false; 1980 private Size mDisplaySize = new Size(0, 0); 1981 private long mBufferSize = 0; 1982 private MultiResolutionStreamConfigurationMap mMultiResolutionStreamConfigurationMap = null; 1983 1984 /** 1985 * Set the current camera Id. 1986 * 1987 * @param cameraId Current camera id. 1988 * 1989 * @hide 1990 */ setCameraId(int cameraId)1991 public void setCameraId(int cameraId) { 1992 mCameraId = cameraId; 1993 } 1994 1995 /** 1996 * Set whether the metadata advertises mandatory concurrent streams. 1997 * 1998 * @param hasMandatoryConcurrentStreams whether the metadata advertises mandatory concurrent 1999 * streams. 2000 * 2001 * @hide 2002 */ setHasMandatoryConcurrentStreams(boolean hasMandatoryConcurrentStreams)2003 public void setHasMandatoryConcurrentStreams(boolean hasMandatoryConcurrentStreams) { 2004 mHasMandatoryConcurrentStreams = hasMandatoryConcurrentStreams; 2005 } 2006 2007 /** 2008 * Set the current display size. 2009 * 2010 * @param displaySize The current display size. 2011 * 2012 * @hide 2013 */ setDisplaySize(Size displaySize)2014 public void setDisplaySize(Size displaySize) { 2015 mDisplaySize = displaySize; 2016 } 2017 2018 /** 2019 * Set the multi-resolution stream configuration map. 2020 * 2021 * @param multiResolutionMap The multi-resolution stream configuration map. 2022 * 2023 * @hide 2024 */ setMultiResolutionStreamConfigurationMap( @NonNull Map<String, StreamConfiguration[]> multiResolutionMap)2025 public void setMultiResolutionStreamConfigurationMap( 2026 @NonNull Map<String, StreamConfiguration[]> multiResolutionMap) { 2027 mMultiResolutionStreamConfigurationMap = 2028 new MultiResolutionStreamConfigurationMap(multiResolutionMap); 2029 } 2030 2031 /** 2032 * Get the multi-resolution stream configuration map. 2033 * 2034 * @return The multi-resolution stream configuration map. 
2035 * 2036 * @hide 2037 */ getMultiResolutionStreamConfigurationMap()2038 public MultiResolutionStreamConfigurationMap getMultiResolutionStreamConfigurationMap() { 2039 return mMultiResolutionStreamConfigurationMap; 2040 } 2041 2042 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) 2043 private long mMetadataPtr; // native std::shared_ptr<CameraMetadata>* 2044 2045 @FastNative nativeAllocate()2046 private static native long nativeAllocate(); 2047 @FastNative nativeAllocateCopy(long ptr)2048 private static native long nativeAllocateCopy(long ptr) 2049 throws NullPointerException; 2050 2051 2052 @FastNative nativeUpdate(long dst, long src)2053 private static native void nativeUpdate(long dst, long src); nativeWriteToParcel(Parcel dest, long ptr)2054 private static synchronized native void nativeWriteToParcel(Parcel dest, long ptr); nativeReadFromParcel(Parcel source, long ptr)2055 private static synchronized native void nativeReadFromParcel(Parcel source, long ptr); nativeSwap(long ptr, long otherPtr)2056 private static synchronized native void nativeSwap(long ptr, long otherPtr) 2057 throws NullPointerException; 2058 @FastNative nativeSetVendorId(long ptr, long vendorId)2059 private static native void nativeSetVendorId(long ptr, long vendorId); nativeClose(long ptr)2060 private static synchronized native void nativeClose(long ptr); nativeIsEmpty(long ptr)2061 private static synchronized native boolean nativeIsEmpty(long ptr); nativeGetEntryCount(long ptr)2062 private static synchronized native int nativeGetEntryCount(long ptr); nativeGetBufferSize(long ptr)2063 private static synchronized native long nativeGetBufferSize(long ptr); 2064 2065 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) nativeReadValues(int tag, long ptr)2066 private static synchronized native byte[] nativeReadValues(int tag, long ptr); nativeWriteValues(int tag, byte[] src, long ptr)2067 private static synchronized native void nativeWriteValues(int tag, byte[] src, long ptr); nativeDump(long ptr)2068 private static synchronized native void nativeDump(long ptr) throws IOException; // dump to LOGD 2069 nativeGetAllVendorKeys(long ptr, Class keyClass)2070 private static synchronized native ArrayList nativeGetAllVendorKeys(long ptr, Class keyClass); 2071 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) nativeGetTagFromKeyLocal(long ptr, String keyName)2072 private static synchronized native int nativeGetTagFromKeyLocal(long ptr, String keyName) 2073 throws IllegalArgumentException; 2074 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) nativeGetTypeFromTagLocal(long ptr, int tag)2075 private static synchronized native int nativeGetTypeFromTagLocal(long ptr, int tag) 2076 throws IllegalArgumentException; 2077 @FastNative nativeGetTagFromKey(String keyName, long vendorId)2078 private static native int nativeGetTagFromKey(String keyName, long vendorId) 2079 throws IllegalArgumentException; 2080 @FastNative nativeGetTypeFromTag(int tag, long vendorId)2081 private static native int nativeGetTypeFromTag(int tag, long vendorId) 2082 throws IllegalArgumentException; 2083 2084 /** 2085 * <p>Perform a 0-copy swap of the internal metadata with another object.</p> 2086 * 2087 * <p>Useful to convert a CameraMetadata into e.g. 
a CaptureRequest.</p> 2088 * 2089 * @param other Metadata to swap with 2090 * @throws NullPointerException if other was null 2091 * @hide 2092 */ swap(CameraMetadataNative other)2093 public void swap(CameraMetadataNative other) { 2094 nativeSwap(mMetadataPtr, other.mMetadataPtr); 2095 mCameraId = other.mCameraId; 2096 mHasMandatoryConcurrentStreams = other.mHasMandatoryConcurrentStreams; 2097 mDisplaySize = other.mDisplaySize; 2098 mMultiResolutionStreamConfigurationMap = other.mMultiResolutionStreamConfigurationMap; 2099 updateNativeAllocation(); 2100 other.updateNativeAllocation(); 2101 } 2102 2103 /** 2104 * Set the native metadata vendor id. 2105 * 2106 * @hide 2107 */ setVendorId(long vendorId)2108 public void setVendorId(long vendorId) { 2109 nativeSetVendorId(mMetadataPtr, vendorId); 2110 } 2111 2112 /** 2113 * @hide 2114 */ getEntryCount()2115 public int getEntryCount() { 2116 return nativeGetEntryCount(mMetadataPtr); 2117 } 2118 2119 /** 2120 * Does this metadata contain at least 1 entry? 2121 * 2122 * @hide 2123 */ isEmpty()2124 public boolean isEmpty() { 2125 return nativeIsEmpty(mMetadataPtr); 2126 } 2127 2128 2129 /** 2130 * Retrieves the pointer to the native shared_ptr<CameraMetadata> as a Java long. 2131 * 2132 * @hide 2133 */ getMetadataPtr()2134 public long getMetadataPtr() { 2135 return mMetadataPtr; 2136 } 2137 2138 /** 2139 * Return a list containing keys of the given key class for all defined vendor tags. 2140 * 2141 * @hide 2142 */ getAllVendorKeys(Class<K> keyClass)2143 public <K> ArrayList<K> getAllVendorKeys(Class<K> keyClass) { 2144 if (keyClass == null) { 2145 throw new NullPointerException(); 2146 } 2147 return (ArrayList<K>) nativeGetAllVendorKeys(mMetadataPtr, keyClass); 2148 } 2149 2150 /** 2151 * Convert a key string into the equivalent native tag. 2152 * 2153 * @throws IllegalArgumentException if the key was not recognized 2154 * @throws NullPointerException if the key was null 2155 * 2156 * @hide 2157 */ getTag(String key)2158 public static int getTag(String key) { 2159 return nativeGetTagFromKey(key, Long.MAX_VALUE); 2160 } 2161 2162 /** 2163 * Convert a key string into the equivalent native tag. 2164 * 2165 * @throws IllegalArgumentException if the key was not recognized 2166 * @throws NullPointerException if the key was null 2167 * 2168 * @hide 2169 */ getTag(String key, long vendorId)2170 public static int getTag(String key, long vendorId) { 2171 return nativeGetTagFromKey(key, vendorId); 2172 } 2173 2174 /** 2175 * Get the underlying native type for a tag. 2176 * 2177 * @param tag An integer tag, see e.g. {@link #getTag} 2178 * @param vendorId A vendor tag provider id 2179 * @return An int enum for the metadata type, see e.g. {@link #TYPE_BYTE} 2180 * 2181 * @hide 2182 */ getNativeType(int tag, long vendorId)2183 public static int getNativeType(int tag, long vendorId) { 2184 return nativeGetTypeFromTag(tag, vendorId); 2185 } 2186 2187 /** 2188 * <p>Updates the existing entry for tag with the new bytes pointed by src, erasing 2189 * the entry if src was null.</p> 2190 * 2191 * <p>An empty array can be passed in to update the entry to 0 elements.</p> 2192 * 2193 * @param tag An integer tag, see e.g. {@link #getTag} 2194 * @param src An array of bytes, or null to erase the entry 2195 * 2196 * @hide 2197 */ writeValues(int tag, byte[] src)2198 public void writeValues(int tag, byte[] src) { 2199 nativeWriteValues(tag, src, mMetadataPtr); 2200 } 2201 2202 /** 2203 * <p>Returns a byte[] of data corresponding to this tag. 
Use a wrapped bytebuffer to unserialize 2204 * the data properly.</p> 2205 * 2206 * <p>An empty array can be returned to denote an existing entry with 0 elements.</p> 2207 * 2208 * @param tag An integer tag, see e.g. {@link #getTag} 2209 * 2210 * @return {@code null} if there were 0 entries for this tag, a byte[] otherwise. 2211 * @hide 2212 */ readValues(int tag)2213 public byte[] readValues(int tag) { 2214 // TODO: Optimization. Native code returns a ByteBuffer instead. 2215 return nativeReadValues(tag, mMetadataPtr); 2216 } 2217 2218 /** 2219 * Dumps the native metadata contents to logcat. 2220 * 2221 * <p>Visibility for testing/debugging only. The results will not 2222 * include any synthesized keys, as they are invisible to the native layer.</p> 2223 * 2224 * @hide 2225 */ dumpToLog()2226 public void dumpToLog() { 2227 try { 2228 nativeDump(mMetadataPtr); 2229 } catch (IOException e) { 2230 Log.wtf(TAG, "Dump logging failed", e); 2231 } 2232 } 2233 2234 @Override finalize()2235 protected void finalize() throws Throwable { 2236 try { 2237 close(); 2238 } finally { 2239 super.finalize(); 2240 } 2241 } 2242 2243 /** 2244 * Get the marshaler compatible with the {@code key} and type {@code T}. 2245 * 2246 * @throws UnsupportedOperationException 2247 * if the native/managed type combination for {@code key} is not supported 2248 */ getMarshalerForKey(Key<T> key, int nativeType)2249 private static <T> Marshaler<T> getMarshalerForKey(Key<T> key, int nativeType) { 2250 return MarshalRegistry.getMarshaler(key.getTypeReference(), 2251 nativeType); 2252 } 2253 2254 @SuppressWarnings({ "unchecked", "rawtypes" }) registerAllMarshalers()2255 private static void registerAllMarshalers() { 2256 if (DEBUG) { 2257 Log.v(TAG, "Shall register metadata marshalers"); 2258 } 2259 2260 MarshalQueryable[] queryList = new MarshalQueryable[] { 2261 // marshalers for standard types 2262 new MarshalQueryablePrimitive(), 2263 new MarshalQueryableEnum(), 2264 new MarshalQueryableArray(), 2265 2266 // pseudo standard types, that expand/narrow the native type into a managed type 2267 new MarshalQueryableBoolean(), 2268 new MarshalQueryableNativeByteToInteger(), 2269 2270 // marshalers for custom types 2271 new MarshalQueryableRect(), 2272 new MarshalQueryableSize(), 2273 new MarshalQueryableSizeF(), 2274 new MarshalQueryableString(), 2275 new MarshalQueryableReprocessFormatsMap(), 2276 new MarshalQueryableRange(), 2277 new MarshalQueryablePair(), 2278 new MarshalQueryableMeteringRectangle(), 2279 new MarshalQueryableColorSpaceTransform(), 2280 new MarshalQueryableStreamConfiguration(), 2281 new MarshalQueryableStreamConfigurationDuration(), 2282 new MarshalQueryableRggbChannelVector(), 2283 new MarshalQueryableBlackLevelPattern(), 2284 new MarshalQueryableHighSpeedVideoConfiguration(), 2285 new MarshalQueryableRecommendedStreamConfiguration(), 2286 2287 // generic parcelable marshaler (MUST BE LAST since it has lowest priority) 2288 new MarshalQueryableParcelable(), 2289 }; 2290 2291 for (MarshalQueryable query : queryList) { 2292 MarshalRegistry.registerMarshalQueryable(query); 2293 } 2294 if (DEBUG) { 2295 Log.v(TAG, "Registered metadata marshalers"); 2296 } 2297 } 2298 2299 /** Check if input arguments are all {@code null}. 2300 * 2301 * @param objs Input arguments for null check 2302 * @return {@code true} if input arguments are all {@code null}, otherwise {@code false} 2303 */ areValuesAllNull(Object... objs)2304 private static boolean areValuesAllNull(Object... 
objs) { 2305 for (Object o : objs) { 2306 if (o != null) return false; 2307 } 2308 return true; 2309 } 2310 2311 /** 2312 * Return the set of physical camera ids that this logical {@link CameraDevice} is made 2313 * up of. 2314 * 2315 * If the camera device isn't a logical camera, return an empty set. 2316 * 2317 * @hide 2318 */ getPhysicalCameraIds()2319 public Set<String> getPhysicalCameraIds() { 2320 int[] availableCapabilities = get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 2321 if (availableCapabilities == null) { 2322 throw new AssertionError("android.request.availableCapabilities must be non-null " 2323 + "in the characteristics"); 2324 } 2325 2326 if (!ArrayUtils.contains(availableCapabilities, 2327 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA)) { 2328 return Collections.emptySet(); 2329 } 2330 byte[] physicalCamIds = get(CameraCharacteristics.LOGICAL_MULTI_CAMERA_PHYSICAL_IDS); 2331 2332 String physicalCamIdString = null; 2333 try { 2334 physicalCamIdString = new String(physicalCamIds, "UTF-8"); 2335 } catch (java.io.UnsupportedEncodingException e) { 2336 throw new AssertionError("android.logicalCam.physicalIds must be UTF-8 string"); 2337 } 2338 String[] physicalCameraIdArray = physicalCamIdString.split("\0"); 2339 2340 return Collections.unmodifiableSet( 2341 new HashSet<String>(Arrays.asList(physicalCameraIdArray))); 2342 } 2343 2344 static { registerAllMarshalers()2345 registerAllMarshalers(); 2346 } 2347 } 2348
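// Illustrative sketch only (not part of the class above): one way a framework-internal caller
// could round-trip a single tag through the raw byte interface of this class. It assumes the
// public no-argument constructor declared earlier in this file; the tag name and single-byte
// payload below are examples for a TYPE_BYTE key, not an exhaustive reference.
//
//     CameraMetadataNative metadata = new CameraMetadataNative();
//     int aeModeTag = CameraMetadataNative.getTag("android.control.aeMode");
//     metadata.writeValues(aeModeTag, new byte[] {(byte) CameraMetadata.CONTROL_AE_MODE_ON});
//     byte[] raw = metadata.readValues(aeModeTag); // null when there is no entry for the tag
//     metadata.writeValues(aeModeTag, /*src*/ null); // erase the entry again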