1 /* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package android.hardware.camera2.impl; 18 19 import android.annotation.NonNull; 20 import android.compat.annotation.UnsupportedAppUsage; 21 import android.graphics.ImageFormat; 22 import android.graphics.Point; 23 import android.graphics.Rect; 24 import android.hardware.camera2.CameraCharacteristics; 25 import android.hardware.camera2.CameraMetadata; 26 import android.hardware.camera2.CaptureRequest; 27 import android.hardware.camera2.CaptureResult; 28 import android.hardware.camera2.marshal.MarshalQueryable; 29 import android.hardware.camera2.marshal.MarshalRegistry; 30 import android.hardware.camera2.marshal.Marshaler; 31 import android.hardware.camera2.marshal.impl.MarshalQueryableArray; 32 import android.hardware.camera2.marshal.impl.MarshalQueryableBlackLevelPattern; 33 import android.hardware.camera2.marshal.impl.MarshalQueryableBoolean; 34 import android.hardware.camera2.marshal.impl.MarshalQueryableColorSpaceTransform; 35 import android.hardware.camera2.marshal.impl.MarshalQueryableEnum; 36 import android.hardware.camera2.marshal.impl.MarshalQueryableHighSpeedVideoConfiguration; 37 import android.hardware.camera2.marshal.impl.MarshalQueryableMeteringRectangle; 38 import android.hardware.camera2.marshal.impl.MarshalQueryableNativeByteToInteger; 39 import android.hardware.camera2.marshal.impl.MarshalQueryablePair; 40 import android.hardware.camera2.marshal.impl.MarshalQueryableParcelable; 41 import android.hardware.camera2.marshal.impl.MarshalQueryablePrimitive; 42 import android.hardware.camera2.marshal.impl.MarshalQueryableRange; 43 import android.hardware.camera2.marshal.impl.MarshalQueryableRecommendedStreamConfiguration; 44 import android.hardware.camera2.marshal.impl.MarshalQueryableRect; 45 import android.hardware.camera2.marshal.impl.MarshalQueryableReprocessFormatsMap; 46 import android.hardware.camera2.marshal.impl.MarshalQueryableRggbChannelVector; 47 import android.hardware.camera2.marshal.impl.MarshalQueryableSize; 48 import android.hardware.camera2.marshal.impl.MarshalQueryableSizeF; 49 import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfiguration; 50 import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfigurationDuration; 51 import android.hardware.camera2.marshal.impl.MarshalQueryableString; 52 import android.hardware.camera2.params.Capability; 53 import android.hardware.camera2.params.Face; 54 import android.hardware.camera2.params.HighSpeedVideoConfiguration; 55 import android.hardware.camera2.params.LensShadingMap; 56 import android.hardware.camera2.params.MeteringRectangle; 57 import android.hardware.camera2.params.MandatoryStreamCombination; 58 import android.hardware.camera2.params.MultiResolutionStreamConfigurationMap; 59 import android.hardware.camera2.params.OisSample; 60 import android.hardware.camera2.params.RecommendedStreamConfiguration; 61 import 
android.hardware.camera2.params.RecommendedStreamConfigurationMap; 62 import android.hardware.camera2.params.ReprocessFormatsMap; 63 import android.hardware.camera2.params.StreamConfiguration; 64 import android.hardware.camera2.params.StreamConfigurationDuration; 65 import android.hardware.camera2.params.StreamConfigurationMap; 66 import android.hardware.camera2.params.TonemapCurve; 67 import android.hardware.camera2.utils.ArrayUtils; 68 import android.hardware.camera2.utils.TypeReference; 69 import android.location.Location; 70 import android.location.LocationManager; 71 import android.os.Build; 72 import android.os.Parcel; 73 import android.os.Parcelable; 74 import android.os.ServiceSpecificException; 75 import android.util.Log; 76 import android.util.Range; 77 import android.util.Size; 78 79 import dalvik.annotation.optimization.FastNative; 80 import dalvik.system.VMRuntime; 81 82 import java.io.IOException; 83 import java.nio.ByteBuffer; 84 import java.nio.ByteOrder; 85 import java.util.ArrayList; 86 import java.util.Arrays; 87 import java.util.Collections; 88 import java.util.HashMap; 89 import java.util.HashSet; 90 import java.util.Map; 91 import java.util.List; 92 import java.util.Objects; 93 import java.util.Set; 94 95 /** 96 * Implementation of camera metadata marshal/unmarshal across Binder to 97 * the camera service 98 */ 99 public class CameraMetadataNative implements Parcelable { 100 101 public static class Key<T> { 102 private boolean mHasTag; 103 private int mTag; 104 private long mVendorId = Long.MAX_VALUE; 105 private final Class<T> mType; 106 private final TypeReference<T> mTypeReference; 107 private final String mName; 108 private final String mFallbackName; 109 private final int mHash; 110 111 /** 112 * @hide 113 */ Key(String name, Class<T> type, long vendorId)114 public Key(String name, Class<T> type, long vendorId) { 115 if (name == null) { 116 throw new NullPointerException("Key needs a valid name"); 117 } else if (type == null) { 118 throw new NullPointerException("Type needs to be non-null"); 119 } 120 mName = name; 121 mFallbackName = null; 122 mType = type; 123 mVendorId = vendorId; 124 mTypeReference = TypeReference.createSpecializedTypeReference(type); 125 mHash = mName.hashCode() ^ mTypeReference.hashCode(); 126 } 127 128 /** 129 * @hide 130 */ Key(String name, String fallbackName, Class<T> type)131 public Key(String name, String fallbackName, Class<T> type) { 132 if (name == null) { 133 throw new NullPointerException("Key needs a valid name"); 134 } else if (type == null) { 135 throw new NullPointerException("Type needs to be non-null"); 136 } 137 mName = name; 138 mFallbackName = fallbackName; 139 mType = type; 140 mTypeReference = TypeReference.createSpecializedTypeReference(type); 141 mHash = mName.hashCode() ^ mTypeReference.hashCode(); 142 } 143 144 /** 145 * Visible for testing only. 146 * 147 * <p>Use the CameraCharacteristics.Key, CaptureResult.Key, or CaptureRequest.Key 148 * for application code or vendor-extended keys.</p> 149 */ Key(String name, Class<T> type)150 public Key(String name, Class<T> type) { 151 if (name == null) { 152 throw new NullPointerException("Key needs a valid name"); 153 } else if (type == null) { 154 throw new NullPointerException("Type needs to be non-null"); 155 } 156 mName = name; 157 mFallbackName = null; 158 mType = type; 159 mTypeReference = TypeReference.createSpecializedTypeReference(type); 160 mHash = mName.hashCode() ^ mTypeReference.hashCode(); 161 } 162 163 /** 164 * Visible for testing only. 
         *
         * <p>Use the CameraCharacteristics.Key, CaptureResult.Key, or CaptureRequest.Key
         * for application code or vendor-extended keys.</p>
         */
        @SuppressWarnings("unchecked")
        public Key(String name, TypeReference<T> typeReference) {
            if (name == null) {
                throw new NullPointerException("Key needs a valid name");
            } else if (typeReference == null) {
                throw new NullPointerException("TypeReference needs to be non-null");
            }
            mName = name;
            mFallbackName = null;
            mType = (Class<T>)typeReference.getRawType();
            mTypeReference = typeReference;
            mHash = mName.hashCode() ^ mTypeReference.hashCode();
        }

        /**
         * Return a camelCase, period separated name formatted like:
         * {@code "root.section[.subsections].name"}.
         *
         * <p>Built-in keys exposed by the Android SDK are always prefixed with {@code "android."};
         * keys that are device/platform-specific are prefixed with {@code "com."}.</p>
         *
         * <p>For example, {@code CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP} would
         * have a name of {@code "android.scaler.streamConfigurationMap"}; whereas a device
         * specific key might look like {@code "com.google.nexus.data.private"}.</p>
         *
         * @return String representation of the key name
         */
        public final String getName() {
            return mName;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public final int hashCode() {
            return mHash;
        }

        /**
         * Compare this key against other native keys, request keys, result keys, and
         * characteristics keys.
         *
         * <p>Two keys are considered equal if their name and type reference are equal.</p>
         *
         * <p>Note that the equality against non-native keys is one-way. A native key may be equal
         * to a result key; but that same result key will not be equal to a native key.</p>
         */
        @SuppressWarnings("rawtypes")
        @Override
        public final boolean equals(Object o) {
            if (this == o) {
                return true;
            }

            if (o == null || this.hashCode() != o.hashCode()) {
                return false;
            }

            Key<?> lhs;

            if (o instanceof CaptureResult.Key) {
                lhs = ((CaptureResult.Key)o).getNativeKey();
            } else if (o instanceof CaptureRequest.Key) {
                lhs = ((CaptureRequest.Key)o).getNativeKey();
            } else if (o instanceof CameraCharacteristics.Key) {
                lhs = ((CameraCharacteristics.Key)o).getNativeKey();
            } else if ((o instanceof Key)) {
                lhs = (Key<?>)o;
            } else {
                return false;
            }

            return mName.equals(lhs.mName) && mTypeReference.equals(lhs.mTypeReference);
        }

        /**
         * <p>
         * Get the tag corresponding to this key. This enables insertion into the
         * native metadata.
         * </p>
         *
         * <p>This value is looked up the first time, and cached subsequently.</p>
         *
         * <p>This function may be called without cacheTag() if this is not a vendor key.
         * If this is a vendor key, cacheTag() must be called first before getTag() can
         * be called. Otherwise, mVendorId could be default (Long.MAX_VALUE) and vendor
         * tag lookup could fail.</p>
         *
         * @return The tag numeric value corresponding to the string
         */
        @UnsupportedAppUsage
        public final int getTag() {
            if (!mHasTag) {
                mTag = CameraMetadataNative.getTag(mName, mVendorId);
                mHasTag = true;
            }
            return mTag;
        }
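
        // Illustrative sketch (not part of the original source): demonstrates the one-way
        // equality documented on equals() above. Assumes a framework-internal caller that can
        // reach CaptureRequest.Key#getNativeKey(); the helper name and key choice are arbitrary.
        private static void exampleOneWayKeyEquality() {
            CaptureRequest.Key<Integer> publicKey = CaptureRequest.JPEG_ORIENTATION;
            Key<Integer> nativeKey = publicKey.getNativeKey();
            boolean forward = nativeKey.equals(publicKey);  // true: name and type reference match
            boolean backward = publicKey.equals(nativeKey); // false: public keys only equal public keys
            Log.v(TAG, "forward=" + forward + ", backward=" + backward);
        }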
        /**
         * Whether this key's tag is cached.
         *
         * @hide
         */
        @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
        public final boolean hasTag() {
            return mHasTag;
        }

        /**
         * Cache this key's tag.
         *
         * @hide
         */
        @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
        public final void cacheTag(int tag) {
            mHasTag = true;
            mTag = tag;
        }

        /**
         * Get the raw class backing the type {@code T} for this key.
         *
         * <p>The distinction is only important if {@code T} is a generic, e.g.
         * {@code Range<Integer>} since the nested type will be erased.</p>
         */
        public final Class<T> getType() {
            // TODO: remove this; other places should use #getTypeReference() instead
            return mType;
        }

        /**
         * Get the vendor tag provider id.
         *
         * @hide
         */
        public final long getVendorId() {
            return mVendorId;
        }

        /**
         * Get the type reference backing the type {@code T} for this key.
         *
         * <p>The distinction is only important if {@code T} is a generic, e.g.
         * {@code Range<Integer>} since the nested type will be retained.</p>
         */
        public final TypeReference<T> getTypeReference() {
            return mTypeReference;
        }
    }

    private static final String TAG = "CameraMetadataJV";
    private static final boolean DEBUG = false;

    // this should be in sync with HAL_PIXEL_FORMAT_BLOB defined in graphics.h
    public static final int NATIVE_JPEG_FORMAT = 0x21;

    private static final String CELLID_PROCESS = "CELLID";
    private static final String GPS_PROCESS = "GPS";
    private static final int FACE_LANDMARK_SIZE = 6;

    private static final int MANDATORY_STREAM_CONFIGURATIONS_DEFAULT = 0;
    private static final int MANDATORY_STREAM_CONFIGURATIONS_MAX_RESOLUTION = 1;
    private static final int MANDATORY_STREAM_CONFIGURATIONS_CONCURRENT = 2;

    private static String translateLocationProviderToProcess(final String provider) {
        if (provider == null) {
            return null;
        }
        switch(provider) {
            case LocationManager.GPS_PROVIDER:
                return GPS_PROCESS;
            case LocationManager.NETWORK_PROVIDER:
                return CELLID_PROCESS;
            default:
                return null;
        }
    }

    private static String translateProcessToLocationProvider(final String process) {
        if (process == null) {
            return null;
        }
        switch(process) {
            case GPS_PROCESS:
                return LocationManager.GPS_PROVIDER;
            case CELLID_PROCESS:
                return LocationManager.NETWORK_PROVIDER;
            default:
                return null;
        }
    }

    public CameraMetadataNative() {
        super();
        mMetadataPtr = nativeAllocate();
        if (mMetadataPtr == 0) {
            throw new OutOfMemoryError("Failed to allocate native CameraMetadata");
        }
        updateNativeAllocation();
    }

    /**
     * Copy constructor - clone metadata
     */
    public CameraMetadataNative(CameraMetadataNative other) {
        super();
        mMetadataPtr = nativeAllocateCopy(other.mMetadataPtr);
        if (mMetadataPtr == 0) {
            throw new OutOfMemoryError("Failed to allocate native CameraMetadata");
        }
        updateNativeAllocation();
    }
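
    // Illustrative sketch (not part of the original source): contrasts the copy constructor
    // above with move() documented below. The helper name and usage are hypothetical.
    private static void exampleCopyVersusMove(CameraMetadataNative source) {
        // Deep copy: 'source' keeps its contents; 'copy' owns an independent native allocation.
        CameraMetadataNative copy = new CameraMetadataNative(source);
        // Move: the native contents are swapped into 'moved' and 'source' is left empty.
        CameraMetadataNative moved = CameraMetadataNative.move(source);
        Log.v(TAG, "copy=" + copy + ", moved=" + moved);
    }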
    /**
     * <p>Move the contents from {@code other} into a new camera metadata instance.</p>
     *
     * <p>After this call, {@code other} will become empty.</p>
     *
     * @param other the previous metadata instance which will get pilfered
     * @return a new metadata instance with the values from {@code other} moved into it
     */
    public static CameraMetadataNative move(CameraMetadataNative other) {
        CameraMetadataNative newObject = new CameraMetadataNative();
        newObject.swap(other);
        return newObject;
    }

    /**
     * Set all metadata values in the destination argument by using the corresponding
     * values from the source. Metadata tags present in the destination and absent
     * from the source will remain unmodified.
     *
     * @param dst Destination metadata
     * @param src Source metadata
     * @hide
     */
    public static void update(CameraMetadataNative dst, CameraMetadataNative src) {
        nativeUpdate(dst.mMetadataPtr, src.mMetadataPtr);
    }

    public static final @android.annotation.NonNull Parcelable.Creator<CameraMetadataNative> CREATOR =
            new Parcelable.Creator<CameraMetadataNative>() {
        @Override
        public CameraMetadataNative createFromParcel(Parcel in) {
            CameraMetadataNative metadata = new CameraMetadataNative();
            metadata.readFromParcel(in);
            return metadata;
        }

        @Override
        public CameraMetadataNative[] newArray(int size) {
            return new CameraMetadataNative[size];
        }
    };

    @Override
    public int describeContents() {
        return 0;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        nativeWriteToParcel(dest, mMetadataPtr);
    }

    /**
     * @hide
     */
    public <T> T get(CameraCharacteristics.Key<T> key) {
        return get(key.getNativeKey());
    }

    /**
     * @hide
     */
    public <T> T get(CaptureResult.Key<T> key) {
        return get(key.getNativeKey());
    }

    /**
     * @hide
     */
    public <T> T get(CaptureRequest.Key<T> key) {
        return get(key.getNativeKey());
    }

    /**
     * Look-up a metadata field value by its key.
     *
     * @param key a non-{@code null} key instance
     * @return the field corresponding to the {@code key}, or {@code null} if no value was set
     */
    public <T> T get(Key<T> key) {
        Objects.requireNonNull(key, "key must not be null");

        // Check if key has been overridden to use a wrapper class on the java side.
        GetCommand g = sGetCommandMap.get(key);
        if (g != null) {
            return g.getValue(this, key);
        }
        return getBase(key);
    }

    public void readFromParcel(Parcel in) {
        nativeReadFromParcel(in, mMetadataPtr);
        updateNativeAllocation();
    }

    /**
     * Set the global client-side vendor tag descriptor to allow use of vendor
     * tags in camera applications.
     *
     * @throws ServiceSpecificException
     * @hide
     */
    public static void setupGlobalVendorTagDescriptor() throws ServiceSpecificException {
        int err = nativeSetupGlobalVendorTagDescriptor();
        if (err != 0) {
            throw new ServiceSpecificException(err, "Failure to set up global vendor tags");
        }
    }

    /**
     * Set the global client-side vendor tag descriptor to allow use of vendor
     * tags in camera applications.
496 * 497 * @return int An error code corresponding to one of the 498 * {@link ICameraService} error constants, or 0 on success. 499 */ nativeSetupGlobalVendorTagDescriptor()500 private static native int nativeSetupGlobalVendorTagDescriptor(); 501 502 /** 503 * Set a camera metadata field to a value. The field definitions can be 504 * found in {@link CameraCharacteristics}, {@link CaptureResult}, and 505 * {@link CaptureRequest}. 506 * 507 * @param key The metadata field to write. 508 * @param value The value to set the field to, which must be of a matching 509 * type to the key. 510 */ set(Key<T> key, T value)511 public <T> void set(Key<T> key, T value) { 512 SetCommand s = sSetCommandMap.get(key); 513 if (s != null) { 514 s.setValue(this, value); 515 return; 516 } 517 518 setBase(key, value); 519 } 520 set(CaptureRequest.Key<T> key, T value)521 public <T> void set(CaptureRequest.Key<T> key, T value) { 522 set(key.getNativeKey(), value); 523 } 524 set(CaptureResult.Key<T> key, T value)525 public <T> void set(CaptureResult.Key<T> key, T value) { 526 set(key.getNativeKey(), value); 527 } 528 set(CameraCharacteristics.Key<T> key, T value)529 public <T> void set(CameraCharacteristics.Key<T> key, T value) { 530 set(key.getNativeKey(), value); 531 } 532 533 // Keep up-to-date with camera_metadata.h 534 /** 535 * @hide 536 */ 537 public static final int TYPE_BYTE = 0; 538 /** 539 * @hide 540 */ 541 public static final int TYPE_INT32 = 1; 542 /** 543 * @hide 544 */ 545 public static final int TYPE_FLOAT = 2; 546 /** 547 * @hide 548 */ 549 public static final int TYPE_INT64 = 3; 550 /** 551 * @hide 552 */ 553 public static final int TYPE_DOUBLE = 4; 554 /** 555 * @hide 556 */ 557 public static final int TYPE_RATIONAL = 5; 558 /** 559 * @hide 560 */ 561 public static final int NUM_TYPES = 6; 562 close()563 private void close() { 564 // Delete native pointer, but does not clear it 565 nativeClose(mMetadataPtr); 566 mMetadataPtr = 0; 567 568 if (mBufferSize > 0) { 569 VMRuntime.getRuntime().registerNativeFree(mBufferSize); 570 } 571 mBufferSize = 0; 572 } 573 getBase(CameraCharacteristics.Key<T> key)574 private <T> T getBase(CameraCharacteristics.Key<T> key) { 575 return getBase(key.getNativeKey()); 576 } 577 getBase(CaptureResult.Key<T> key)578 private <T> T getBase(CaptureResult.Key<T> key) { 579 return getBase(key.getNativeKey()); 580 } 581 getBase(CaptureRequest.Key<T> key)582 private <T> T getBase(CaptureRequest.Key<T> key) { 583 return getBase(key.getNativeKey()); 584 } 585 getBase(Key<T> key)586 private <T> T getBase(Key<T> key) { 587 int tag; 588 if (key.hasTag()) { 589 tag = key.getTag(); 590 } else { 591 tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.getName()); 592 key.cacheTag(tag); 593 } 594 byte[] values = readValues(tag); 595 if (values == null) { 596 // If the key returns null, use the fallback key if exists. 597 // This is to support old key names for the newly published keys. 598 if (key.mFallbackName == null) { 599 return null; 600 } 601 tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.mFallbackName); 602 values = readValues(tag); 603 if (values == null) { 604 return null; 605 } 606 } 607 608 int nativeType = nativeGetTypeFromTagLocal(mMetadataPtr, tag); 609 Marshaler<T> marshaler = getMarshalerForKey(key, nativeType); 610 ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder()); 611 return marshaler.unmarshal(buffer); 612 } 613 614 // Use Command pattern here to avoid lots of expensive if/equals checks in get for overridden 615 // metadata. 
616 private static final HashMap<Key<?>, GetCommand> sGetCommandMap = 617 new HashMap<Key<?>, GetCommand>(); 618 static { 619 sGetCommandMap.put( GetCommand()620 CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey(), new GetCommand() { 621 @Override 622 @SuppressWarnings("unchecked") 623 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 624 return (T) metadata.getAvailableFormats(); 625 } 626 }); 627 sGetCommandMap.put( GetCommand()628 CaptureResult.STATISTICS_FACES.getNativeKey(), new GetCommand() { 629 @Override 630 @SuppressWarnings("unchecked") 631 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 632 return (T) metadata.getFaces(); 633 } 634 }); 635 sGetCommandMap.put( GetCommand()636 CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey(), new GetCommand() { 637 @Override 638 @SuppressWarnings("unchecked") 639 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 640 return (T) metadata.getFaceRectangles(); 641 } 642 }); 643 sGetCommandMap.put( CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP.getNativeKey()644 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP.getNativeKey(), 645 new GetCommand() { 646 @Override 647 @SuppressWarnings("unchecked") 648 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 649 return (T) metadata.getStreamConfigurationMap(); 650 } 651 }); 652 sGetCommandMap.put( CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION.getNativeKey()653 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION.getNativeKey(), 654 new GetCommand() { 655 @Override 656 @SuppressWarnings("unchecked") 657 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 658 return (T) metadata.getStreamConfigurationMapMaximumResolution(); 659 } 660 }); 661 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_STREAM_COMBINATIONS.getNativeKey()662 CameraCharacteristics.SCALER_MANDATORY_STREAM_COMBINATIONS.getNativeKey(), 663 new GetCommand() { 664 @Override 665 @SuppressWarnings("unchecked") 666 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 667 return (T) metadata.getMandatoryStreamCombinations(); 668 } 669 }); 670 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_CONCURRENT_STREAM_COMBINATIONS.getNativeKey()671 CameraCharacteristics.SCALER_MANDATORY_CONCURRENT_STREAM_COMBINATIONS.getNativeKey(), 672 new GetCommand() { 673 @Override 674 @SuppressWarnings("unchecked") 675 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 676 return (T) metadata.getMandatoryConcurrentStreamCombinations(); 677 } 678 }); 679 680 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_MAXIMUM_RESOLUTION_STREAM_COMBINATIONS.getNativeKey()681 CameraCharacteristics.SCALER_MANDATORY_MAXIMUM_RESOLUTION_STREAM_COMBINATIONS.getNativeKey(), 682 new GetCommand() { 683 @Override 684 @SuppressWarnings("unchecked") 685 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 686 return (T) metadata.getMandatoryMaximumResolutionStreamCombinations(); 687 } 688 }); 689 690 sGetCommandMap.put( CameraCharacteristics.CONTROL_MAX_REGIONS_AE.getNativeKey()691 CameraCharacteristics.CONTROL_MAX_REGIONS_AE.getNativeKey(), new GetCommand() { 692 @Override 693 @SuppressWarnings("unchecked") 694 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 695 return (T) metadata.getMaxRegions(key); 696 } 697 }); 698 sGetCommandMap.put( GetCommand()699 CameraCharacteristics.CONTROL_MAX_REGIONS_AWB.getNativeKey(), new GetCommand() { 700 @Override 701 
@SuppressWarnings("unchecked") 702 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 703 return (T) metadata.getMaxRegions(key); 704 } 705 }); 706 sGetCommandMap.put( CameraCharacteristics.CONTROL_MAX_REGIONS_AF.getNativeKey()707 CameraCharacteristics.CONTROL_MAX_REGIONS_AF.getNativeKey(), new GetCommand() { 708 @Override 709 @SuppressWarnings("unchecked") 710 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 711 return (T) metadata.getMaxRegions(key); 712 } 713 }); 714 sGetCommandMap.put( GetCommand()715 CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW.getNativeKey(), new GetCommand() { 716 @Override 717 @SuppressWarnings("unchecked") 718 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 719 return (T) metadata.getMaxNumOutputs(key); 720 } 721 }); 722 sGetCommandMap.put( GetCommand()723 CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC.getNativeKey(), new GetCommand() { 724 @Override 725 @SuppressWarnings("unchecked") 726 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 727 return (T) metadata.getMaxNumOutputs(key); 728 } 729 }); 730 sGetCommandMap.put( CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING.getNativeKey()731 CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING.getNativeKey(), 732 new GetCommand() { 733 @Override 734 @SuppressWarnings("unchecked") 735 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 736 return (T) metadata.getMaxNumOutputs(key); 737 } 738 }); 739 sGetCommandMap.put( GetCommand()740 CaptureRequest.TONEMAP_CURVE.getNativeKey(), new GetCommand() { 741 @Override 742 @SuppressWarnings("unchecked") 743 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 744 return (T) metadata.getTonemapCurve(); 745 } 746 }); 747 sGetCommandMap.put( GetCommand()748 CaptureResult.JPEG_GPS_LOCATION.getNativeKey(), new GetCommand() { 749 @Override 750 @SuppressWarnings("unchecked") 751 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 752 return (T) metadata.getGpsLocation(); 753 } 754 }); 755 sGetCommandMap.put( CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.getNativeKey()756 CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.getNativeKey(), 757 new GetCommand() { 758 @Override 759 @SuppressWarnings("unchecked") 760 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 761 return (T) metadata.getLensShadingMap(); 762 } 763 }); 764 sGetCommandMap.put( CaptureResult.STATISTICS_OIS_SAMPLES.getNativeKey()765 CaptureResult.STATISTICS_OIS_SAMPLES.getNativeKey(), 766 new GetCommand() { 767 @Override 768 @SuppressWarnings("unchecked") 769 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 770 return (T) metadata.getOisSamples(); 771 } 772 }); 773 sGetCommandMap.put( CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES.getNativeKey()774 CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES.getNativeKey(), 775 new GetCommand() { 776 @Override 777 @SuppressWarnings("unchecked") 778 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 779 return (T) metadata.getExtendedSceneModeCapabilities(); 780 } 781 }); 782 sGetCommandMap.put( CameraCharacteristics.SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP.getNativeKey()783 CameraCharacteristics.SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP.getNativeKey(), 784 new GetCommand() { 785 @Override 786 @SuppressWarnings("unchecked") 787 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 788 return (T) 
                        metadata.getMultiResolutionStreamConfigurationMap();
                    }
                });
    }

    private int[] getAvailableFormats() {
        int[] availableFormats = getBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS);
        if (availableFormats != null) {
            for (int i = 0; i < availableFormats.length; i++) {
                // JPEG has different value between native and managed side, need override.
                if (availableFormats[i] == NATIVE_JPEG_FORMAT) {
                    availableFormats[i] = ImageFormat.JPEG;
                }
            }
        }

        return availableFormats;
    }

    private boolean setFaces(Face[] faces) {
        if (faces == null) {
            return false;
        }

        int numFaces = faces.length;

        // Detect if all faces are SIMPLE or not; count # of valid faces
        boolean fullMode = true;
        for (Face face : faces) {
            if (face == null) {
                numFaces--;
                Log.w(TAG, "setFaces - null face detected, skipping");
                continue;
            }

            if (face.getId() == Face.ID_UNSUPPORTED) {
                fullMode = false;
            }
        }

        Rect[] faceRectangles = new Rect[numFaces];
        byte[] faceScores = new byte[numFaces];
        int[] faceIds = null;
        int[] faceLandmarks = null;

        if (fullMode) {
            faceIds = new int[numFaces];
            faceLandmarks = new int[numFaces * FACE_LANDMARK_SIZE];
        }

        int i = 0;
        for (Face face : faces) {
            if (face == null) {
                continue;
            }

            faceRectangles[i] = face.getBounds();
            faceScores[i] = (byte)face.getScore();

            if (fullMode) {
                faceIds[i] = face.getId();

                int j = 0;

                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().x;
                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().y;
                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().x;
                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().y;
                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().x;
                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().y;
            }

            i++;
        }

        set(CaptureResult.STATISTICS_FACE_RECTANGLES, faceRectangles);
        set(CaptureResult.STATISTICS_FACE_IDS, faceIds);
        set(CaptureResult.STATISTICS_FACE_LANDMARKS, faceLandmarks);
        set(CaptureResult.STATISTICS_FACE_SCORES, faceScores);

        return true;
    }
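
    // Illustrative sketch (not part of the original source): shows the landmark packing that
    // setFaces() above writes and getFaces() below reads back, i.e. FACE_LANDMARK_SIZE (6)
    // ints per face in the order leftEye.x, leftEye.y, rightEye.x, rightEye.y, mouth.x, mouth.y.
    // The coordinate values and helper name are arbitrary.
    private static int[] examplePackSingleFaceLandmarks() {
        Point leftEye = new Point(100, 200);
        Point rightEye = new Point(300, 200);
        Point mouth = new Point(200, 350);
        return new int[] {
                leftEye.x, leftEye.y,    // entries 0-1
                rightEye.x, rightEye.y,  // entries 2-3
                mouth.x, mouth.y         // entries 4-5
        };
    }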

    private Face[] getFaces() {
        Integer faceDetectMode = get(CaptureResult.STATISTICS_FACE_DETECT_MODE);
        byte[] faceScores = get(CaptureResult.STATISTICS_FACE_SCORES);
        Rect[] faceRectangles = get(CaptureResult.STATISTICS_FACE_RECTANGLES);
        int[] faceIds = get(CaptureResult.STATISTICS_FACE_IDS);
        int[] faceLandmarks = get(CaptureResult.STATISTICS_FACE_LANDMARKS);

        if (areValuesAllNull(faceDetectMode, faceScores, faceRectangles, faceIds, faceLandmarks)) {
            return null;
        }

        if (faceDetectMode == null) {
            Log.w(TAG, "Face detect mode metadata is null, assuming the mode is SIMPLE");
            faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE;
        } else if (faceDetectMode > CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
            // Face detect mode is larger than FULL, assuming the mode is FULL
            faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL;
        } else {
            if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) {
                return new Face[0];
            }
            if (faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE &&
                    faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
                Log.w(TAG, "Unknown face detect mode: " + faceDetectMode);
                return new Face[0];
            }
        }

        // Face scores and rectangles are required by SIMPLE and FULL mode.
        if (faceScores == null || faceRectangles == null) {
            Log.w(TAG, "Expect face scores and rectangles to be non-null");
            return new Face[0];
        } else if (faceScores.length != faceRectangles.length) {
            Log.w(TAG, String.format("Face score size(%d) doesn't match face rectangle size(%d)!",
                    faceScores.length, faceRectangles.length));
        }

        // To be safe, clamp the number of faces to the smallest of the face metadata lengths.
        int numFaces = Math.min(faceScores.length, faceRectangles.length);
        // Face ids and landmarks are only required by FULL mode.
        if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
            if (faceIds == null || faceLandmarks == null) {
                Log.w(TAG, "Expect face ids and landmarks to be non-null for FULL mode, "
                        + "falling back to SIMPLE mode");
                faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE;
            } else {
                if (faceIds.length != numFaces ||
                        faceLandmarks.length != numFaces * FACE_LANDMARK_SIZE) {
                    Log.w(TAG, String.format("Face id size(%d), or face landmark size(%d) don't "
                            + "match face number(%d)!",
                            faceIds.length, faceLandmarks.length, numFaces));
                }
                // To be safe, clamp the number of faces to the smallest of the face metadata
                // lengths.
                numFaces = Math.min(numFaces, faceIds.length);
                numFaces = Math.min(numFaces, faceLandmarks.length / FACE_LANDMARK_SIZE);
            }
        }

        ArrayList<Face> faceList = new ArrayList<Face>();
        if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) {
            for (int i = 0; i < numFaces; i++) {
                if (faceScores[i] <= Face.SCORE_MAX &&
                        faceScores[i] >= Face.SCORE_MIN) {
                    faceList.add(new Face(faceRectangles[i], faceScores[i]));
                }
            }
        } else {
            // CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL
            for (int i = 0; i < numFaces; i++) {
                if (faceScores[i] <= Face.SCORE_MAX &&
                        faceScores[i] >= Face.SCORE_MIN &&
                        faceIds[i] >= 0) {
                    Point leftEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE],
                            faceLandmarks[i*FACE_LANDMARK_SIZE+1]);
                    Point rightEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+2],
                            faceLandmarks[i*FACE_LANDMARK_SIZE+3]);
                    Point mouth = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+4],
                            faceLandmarks[i*FACE_LANDMARK_SIZE+5]);
                    Face face = new Face(faceRectangles[i], faceScores[i], faceIds[i],
                            leftEye, rightEye, mouth);
                    faceList.add(face);
                }
            }
        }
        Face[] faces = new Face[faceList.size()];
        faceList.toArray(faces);
        return faces;
    }
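
    // Illustrative sketch (not part of the original source): a worked example of the rectangle
    // convention handled by getFaceRectangles() below. Native face rectangles arrive as
    // (left, top, right, bottom); the marshaling layer expects (left, top, width, height).
    // The numbers and helper name are arbitrary.
    private static Rect exampleFaceRectToWidthHeight() {
        Rect nativeRect = new Rect(/*left*/ 10, /*top*/ 20, /*right*/ 110, /*bottom*/ 220);
        // Pack width and height into the last two slots, as getFaceRectangles() below does:
        // width = 110 - 10 = 100, height = 220 - 20 = 200.
        return new Rect(nativeRect.left, nativeRect.top,
                nativeRect.right - nativeRect.left, nativeRect.bottom - nativeRect.top);
    }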
    // Face rectangles are defined as (left, top, right, bottom) instead of
    // (left, top, width, height) at the native level, so the normal Rect
    // conversion that does (l, t, w, h) -> (l, t, r, b) is unnecessary. Undo
    // that conversion here for just the faces.
    private Rect[] getFaceRectangles() {
        Rect[] faceRectangles = getBase(CaptureResult.STATISTICS_FACE_RECTANGLES);
        if (faceRectangles == null) return null;

        Rect[] fixedFaceRectangles = new Rect[faceRectangles.length];
        for (int i = 0; i < faceRectangles.length; i++) {
            fixedFaceRectangles[i] = new Rect(
                    faceRectangles[i].left,
                    faceRectangles[i].top,
                    faceRectangles[i].right - faceRectangles[i].left,
                    faceRectangles[i].bottom - faceRectangles[i].top);
        }
        return fixedFaceRectangles;
    }

    private LensShadingMap getLensShadingMap() {
        float[] lsmArray = getBase(CaptureResult.STATISTICS_LENS_SHADING_MAP);
        Size s = get(CameraCharacteristics.LENS_INFO_SHADING_MAP_SIZE);

        // Do not warn if lsmArray is null while s is not. This is valid.
        if (lsmArray == null) {
            return null;
        }

        if (s == null) {
            Log.w(TAG, "getLensShadingMap - Lens shading map size was null.");
            return null;
        }

        LensShadingMap map = new LensShadingMap(lsmArray, s.getHeight(), s.getWidth());
        return map;
    }

    private Location getGpsLocation() {
        String processingMethod = get(CaptureResult.JPEG_GPS_PROCESSING_METHOD);
        double[] coords = get(CaptureResult.JPEG_GPS_COORDINATES);
        Long timeStamp = get(CaptureResult.JPEG_GPS_TIMESTAMP);

        if (areValuesAllNull(processingMethod, coords, timeStamp)) {
            return null;
        }

        Location l = new Location(translateProcessToLocationProvider(processingMethod));
        if (timeStamp != null) {
            // Location expects timestamp in [ms.]
            l.setTime(timeStamp * 1000);
        } else {
            Log.w(TAG, "getGpsLocation - No timestamp for GPS location.");
        }

        if (coords != null) {
            l.setLatitude(coords[0]);
            l.setLongitude(coords[1]);
            l.setAltitude(coords[2]);
        } else {
            Log.w(TAG, "getGpsLocation - No coordinates for GPS location");
        }

        return l;
    }
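
    // Illustrative sketch (not part of the original source): the unit conversion used by
    // getGpsLocation() above and setGpsLocation() below. Location timestamps are in
    // milliseconds, while JPEG_GPS_TIMESTAMP is in seconds. Values and name are arbitrary.
    private static void exampleGpsTimestampUnits() {
        long locationTimeMs = 1_600_000_000_123L;         // Location#getTime(), milliseconds
        long jpegGpsTimestampSec = locationTimeMs / 1000; // value written to JPEG_GPS_TIMESTAMP
        long roundTripMs = jpegGpsTimestampSec * 1000;    // value read back by getGpsLocation()
        Log.v(TAG, "ms=" + locationTimeMs + " s=" + jpegGpsTimestampSec + " back=" + roundTripMs);
    }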
    private boolean setGpsLocation(Location l) {
        if (l == null) {
            return false;
        }

        double[] coords = { l.getLatitude(), l.getLongitude(), l.getAltitude() };
        String processMethod = translateLocationProviderToProcess(l.getProvider());
        // JPEG_GPS_TIMESTAMP expects sec. instead of msec.
        long timestamp = l.getTime() / 1000;

        set(CaptureRequest.JPEG_GPS_TIMESTAMP, timestamp);
        set(CaptureRequest.JPEG_GPS_COORDINATES, coords);

        if (processMethod == null) {
            Log.w(TAG, "setGpsLocation - No process method, Location is not from a GPS or "
                    + "NETWORK provider");
        } else {
            setBase(CaptureRequest.JPEG_GPS_PROCESSING_METHOD, processMethod);
        }
        return true;
    }

    private void parseRecommendedConfigurations(RecommendedStreamConfiguration[] configurations,
            StreamConfigurationMap fullMap, boolean isDepth,
            ArrayList<ArrayList<StreamConfiguration>> /*out*/streamConfigList,
            ArrayList<ArrayList<StreamConfigurationDuration>> /*out*/streamDurationList,
            ArrayList<ArrayList<StreamConfigurationDuration>> /*out*/streamStallList,
            boolean[] /*out*/supportsPrivate) {

        streamConfigList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT);
        streamDurationList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT);
        streamStallList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT);
        for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) {
            streamConfigList.add(new ArrayList<StreamConfiguration> ());
            streamDurationList.add(new ArrayList<StreamConfigurationDuration> ());
            streamStallList.add(new ArrayList<StreamConfigurationDuration> ());
        }

        for (RecommendedStreamConfiguration c : configurations) {
            int width = c.getWidth();
            int height = c.getHeight();
            int internalFormat = c.getFormat();
            int publicFormat =
                    (isDepth) ?
StreamConfigurationMap.depthFormatToPublic(internalFormat) : 1069 StreamConfigurationMap.imageFormatToPublic(internalFormat); 1070 Size sz = new Size(width, height); 1071 int usecaseBitmap = c.getUsecaseBitmap(); 1072 1073 if (!c.isInput()) { 1074 StreamConfigurationDuration minDurationConfiguration = null; 1075 StreamConfigurationDuration stallDurationConfiguration = null; 1076 1077 StreamConfiguration streamConfiguration = new StreamConfiguration(internalFormat, 1078 width, height, /*input*/ false); 1079 1080 long minFrameDuration = fullMap.getOutputMinFrameDuration(publicFormat, sz); 1081 if (minFrameDuration > 0) { 1082 minDurationConfiguration = new StreamConfigurationDuration(internalFormat, 1083 width, height, minFrameDuration); 1084 } 1085 1086 long stallDuration = fullMap.getOutputStallDuration(publicFormat, sz); 1087 if (stallDuration > 0) { 1088 stallDurationConfiguration = new StreamConfigurationDuration(internalFormat, 1089 width, height, stallDuration); 1090 } 1091 1092 for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) { 1093 if ((usecaseBitmap & (1 << i)) != 0) { 1094 ArrayList<StreamConfiguration> sc = streamConfigList.get(i); 1095 sc.add(streamConfiguration); 1096 1097 if (minFrameDuration > 0) { 1098 ArrayList<StreamConfigurationDuration> scd = streamDurationList.get(i); 1099 scd.add(minDurationConfiguration); 1100 } 1101 1102 if (stallDuration > 0) { 1103 ArrayList<StreamConfigurationDuration> scs = streamStallList.get(i); 1104 scs.add(stallDurationConfiguration); 1105 } 1106 1107 if ((supportsPrivate != null) && !supportsPrivate[i] && 1108 (publicFormat == ImageFormat.PRIVATE)) { 1109 supportsPrivate[i] = true; 1110 } 1111 } 1112 } 1113 } else { 1114 if (usecaseBitmap != (1 << RecommendedStreamConfigurationMap.USECASE_ZSL)) { 1115 throw new IllegalArgumentException("Recommended input stream configurations " + 1116 "should only be advertised in the ZSL use case!"); 1117 } 1118 1119 ArrayList<StreamConfiguration> sc = streamConfigList.get( 1120 RecommendedStreamConfigurationMap.USECASE_ZSL); 1121 sc.add(new StreamConfiguration(internalFormat, 1122 width, height, /*input*/ true)); 1123 } 1124 } 1125 } 1126 1127 private class StreamConfigurationData { 1128 StreamConfiguration [] streamConfigurationArray = null; 1129 StreamConfigurationDuration [] minDurationArray = null; 1130 StreamConfigurationDuration [] stallDurationArray = null; 1131 } 1132 initializeStreamConfigurationData(ArrayList<StreamConfiguration> sc, ArrayList<StreamConfigurationDuration> scd, ArrayList<StreamConfigurationDuration> scs, StreamConfigurationData scData)1133 public void initializeStreamConfigurationData(ArrayList<StreamConfiguration> sc, 1134 ArrayList<StreamConfigurationDuration> scd, ArrayList<StreamConfigurationDuration> scs, 1135 StreamConfigurationData /*out*/scData) { 1136 if ((scData == null) || (sc == null)) { 1137 return; 1138 } 1139 1140 scData.streamConfigurationArray = new StreamConfiguration[sc.size()]; 1141 scData.streamConfigurationArray = sc.toArray(scData.streamConfigurationArray); 1142 1143 if ((scd != null) && !scd.isEmpty()) { 1144 scData.minDurationArray = new StreamConfigurationDuration[scd.size()]; 1145 scData.minDurationArray = scd.toArray(scData.minDurationArray); 1146 } else { 1147 scData.minDurationArray = new StreamConfigurationDuration[0]; 1148 } 1149 1150 if ((scs != null) && !scs.isEmpty()) { 1151 scData.stallDurationArray = new StreamConfigurationDuration[scs.size()]; 1152 scData.stallDurationArray = scs.toArray(scData.stallDurationArray); 
1153 } else { 1154 scData.stallDurationArray = new StreamConfigurationDuration[0]; 1155 } 1156 } 1157 1158 /** 1159 * Retrieve the list of recommended stream configurations. 1160 * 1161 * @return A list of recommended stream configuration maps for each common use case or null 1162 * in case the recommended stream configurations are invalid or incomplete. 1163 * @hide 1164 */ getRecommendedStreamConfigurations()1165 public ArrayList<RecommendedStreamConfigurationMap> getRecommendedStreamConfigurations() { 1166 RecommendedStreamConfiguration[] configurations = getBase( 1167 CameraCharacteristics.SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS); 1168 RecommendedStreamConfiguration[] depthConfigurations = getBase( 1169 CameraCharacteristics.DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS); 1170 if ((configurations == null) && (depthConfigurations == null)) { 1171 return null; 1172 } 1173 1174 StreamConfigurationMap fullMap = getStreamConfigurationMap(); 1175 ArrayList<RecommendedStreamConfigurationMap> recommendedConfigurations = 1176 new ArrayList<RecommendedStreamConfigurationMap> (); 1177 1178 ArrayList<ArrayList<StreamConfiguration>> streamConfigList = 1179 new ArrayList<ArrayList<StreamConfiguration>>(); 1180 ArrayList<ArrayList<StreamConfigurationDuration>> streamDurationList = 1181 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1182 ArrayList<ArrayList<StreamConfigurationDuration>> streamStallList = 1183 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1184 boolean[] supportsPrivate = 1185 new boolean[RecommendedStreamConfigurationMap.MAX_USECASE_COUNT]; 1186 try { 1187 if (configurations != null) { 1188 parseRecommendedConfigurations(configurations, fullMap, /*isDepth*/ false, 1189 streamConfigList, streamDurationList, streamStallList, supportsPrivate); 1190 } 1191 } catch (IllegalArgumentException e) { 1192 Log.e(TAG, "Failed parsing the recommended stream configurations!"); 1193 return null; 1194 } 1195 1196 ArrayList<ArrayList<StreamConfiguration>> depthStreamConfigList = 1197 new ArrayList<ArrayList<StreamConfiguration>>(); 1198 ArrayList<ArrayList<StreamConfigurationDuration>> depthStreamDurationList = 1199 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1200 ArrayList<ArrayList<StreamConfigurationDuration>> depthStreamStallList = 1201 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1202 if (depthConfigurations != null) { 1203 try { 1204 parseRecommendedConfigurations(depthConfigurations, fullMap, /*isDepth*/ true, 1205 depthStreamConfigList, depthStreamDurationList, depthStreamStallList, 1206 /*supportsPrivate*/ null); 1207 } catch (IllegalArgumentException e) { 1208 Log.e(TAG, "Failed parsing the recommended depth stream configurations!"); 1209 return null; 1210 } 1211 } 1212 1213 ReprocessFormatsMap inputOutputFormatsMap = getBase( 1214 CameraCharacteristics.SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP); 1215 HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase( 1216 CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS); 1217 boolean listHighResolution = isBurstSupported(); 1218 recommendedConfigurations.ensureCapacity( 1219 RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 1220 for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) { 1221 StreamConfigurationData scData = new StreamConfigurationData(); 1222 if (configurations != null) { 1223 initializeStreamConfigurationData(streamConfigList.get(i), 1224 streamDurationList.get(i), streamStallList.get(i), 
scData); 1225 } 1226 1227 StreamConfigurationData depthScData = new StreamConfigurationData(); 1228 if (depthConfigurations != null) { 1229 initializeStreamConfigurationData(depthStreamConfigList.get(i), 1230 depthStreamDurationList.get(i), depthStreamStallList.get(i), depthScData); 1231 } 1232 1233 if ((scData.streamConfigurationArray == null || 1234 scData.streamConfigurationArray.length == 0) && 1235 (depthScData.streamConfigurationArray == null || 1236 depthScData.streamConfigurationArray.length == 0)) { 1237 recommendedConfigurations.add(null); 1238 continue; 1239 } 1240 1241 // Dynamic depth streams involve alot of SW processing and currently cannot be 1242 // recommended. 1243 StreamConfigurationMap map = null; 1244 switch (i) { 1245 case RecommendedStreamConfigurationMap.USECASE_PREVIEW: 1246 case RecommendedStreamConfigurationMap.USECASE_RAW: 1247 case RecommendedStreamConfigurationMap.USECASE_LOW_LATENCY_SNAPSHOT: 1248 case RecommendedStreamConfigurationMap.USECASE_VIDEO_SNAPSHOT: 1249 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1250 scData.minDurationArray, scData.stallDurationArray, 1251 /*depthconfiguration*/ null, /*depthminduration*/ null, 1252 /*depthstallduration*/ null, 1253 /*dynamicDepthConfigurations*/ null, 1254 /*dynamicDepthMinFrameDurations*/ null, 1255 /*dynamicDepthStallDurations*/ null, 1256 /*heicconfiguration*/ null, 1257 /*heicminduration*/ null, 1258 /*heicstallduration*/ null, 1259 /*highspeedvideoconfigurations*/ null, 1260 /*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]); 1261 break; 1262 case RecommendedStreamConfigurationMap.USECASE_RECORD: 1263 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1264 scData.minDurationArray, scData.stallDurationArray, 1265 /*depthconfiguration*/ null, /*depthminduration*/ null, 1266 /*depthstallduration*/ null, 1267 /*dynamicDepthConfigurations*/ null, 1268 /*dynamicDepthMinFrameDurations*/ null, 1269 /*dynamicDepthStallDurations*/ null, 1270 /*heicconfiguration*/ null, 1271 /*heicminduration*/ null, 1272 /*heicstallduration*/ null, 1273 highSpeedVideoConfigurations, 1274 /*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]); 1275 break; 1276 case RecommendedStreamConfigurationMap.USECASE_ZSL: 1277 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1278 scData.minDurationArray, scData.stallDurationArray, 1279 depthScData.streamConfigurationArray, depthScData.minDurationArray, 1280 depthScData.stallDurationArray, 1281 /*dynamicDepthConfigurations*/ null, 1282 /*dynamicDepthMinFrameDurations*/ null, 1283 /*dynamicDepthStallDurations*/ null, 1284 /*heicconfiguration*/ null, 1285 /*heicminduration*/ null, 1286 /*heicstallduration*/ null, 1287 /*highSpeedVideoConfigurations*/ null, 1288 inputOutputFormatsMap, listHighResolution, supportsPrivate[i]); 1289 break; 1290 default: 1291 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1292 scData.minDurationArray, scData.stallDurationArray, 1293 depthScData.streamConfigurationArray, depthScData.minDurationArray, 1294 depthScData.stallDurationArray, 1295 /*dynamicDepthConfigurations*/ null, 1296 /*dynamicDepthMinFrameDurations*/ null, 1297 /*dynamicDepthStallDurations*/ null, 1298 /*heicconfiguration*/ null, 1299 /*heicminduration*/ null, 1300 /*heicstallduration*/ null, 1301 /*highSpeedVideoConfigurations*/ null, 1302 /*inputOutputFormatsMap*/ null, listHighResolution, supportsPrivate[i]); 1303 } 1304 1305 recommendedConfigurations.add(new 
RecommendedStreamConfigurationMap(map, /*usecase*/i, 1306 supportsPrivate[i])); 1307 } 1308 1309 return recommendedConfigurations; 1310 } 1311 isCapabilitySupported(int capabilityRequested)1312 private boolean isCapabilitySupported(int capabilityRequested) { 1313 boolean ret = false; 1314 1315 int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 1316 for (int capability : capabilities) { 1317 if (capabilityRequested == capability) { 1318 ret = true; 1319 break; 1320 } 1321 } 1322 1323 return ret; 1324 } 1325 1326 /** 1327 * @hide 1328 */ isUltraHighResolutionSensor()1329 public boolean isUltraHighResolutionSensor() { 1330 return isCapabilitySupported( 1331 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR); 1332 1333 } isBurstSupported()1334 private boolean isBurstSupported() { 1335 return isCapabilitySupported( 1336 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE); 1337 } 1338 getMandatoryStreamCombinationsHelper( int mandatoryStreamsType)1339 private MandatoryStreamCombination[] getMandatoryStreamCombinationsHelper( 1340 int mandatoryStreamsType) { 1341 int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 1342 ArrayList<Integer> caps = new ArrayList<Integer>(); 1343 caps.ensureCapacity(capabilities.length); 1344 for (int c : capabilities) { 1345 caps.add(new Integer(c)); 1346 } 1347 int hwLevel = getBase(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); 1348 MandatoryStreamCombination.Builder build = new MandatoryStreamCombination.Builder( 1349 mCameraId, hwLevel, mDisplaySize, caps, getStreamConfigurationMap(), 1350 getStreamConfigurationMapMaximumResolution()); 1351 1352 List<MandatoryStreamCombination> combs = null; 1353 switch (mandatoryStreamsType) { 1354 case MANDATORY_STREAM_CONFIGURATIONS_CONCURRENT: 1355 combs = build.getAvailableMandatoryConcurrentStreamCombinations(); 1356 break; 1357 case MANDATORY_STREAM_CONFIGURATIONS_MAX_RESOLUTION: 1358 combs = build.getAvailableMandatoryMaximumResolutionStreamCombinations(); 1359 break; 1360 default: 1361 combs = build.getAvailableMandatoryStreamCombinations(); 1362 } 1363 if ((combs != null) && (!combs.isEmpty())) { 1364 MandatoryStreamCombination[] combArray = new MandatoryStreamCombination[combs.size()]; 1365 combArray = combs.toArray(combArray); 1366 return combArray; 1367 } 1368 return null; 1369 } 1370 getMandatoryConcurrentStreamCombinations()1371 private MandatoryStreamCombination[] getMandatoryConcurrentStreamCombinations() { 1372 if (!mHasMandatoryConcurrentStreams) { 1373 return null; 1374 } 1375 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_CONCURRENT); 1376 } 1377 getMandatoryMaximumResolutionStreamCombinations()1378 private MandatoryStreamCombination[] getMandatoryMaximumResolutionStreamCombinations() { 1379 if (!isUltraHighResolutionSensor()) { 1380 return null; 1381 } 1382 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_MAX_RESOLUTION); 1383 } 1384 getMandatoryStreamCombinations()1385 private MandatoryStreamCombination[] getMandatoryStreamCombinations() { 1386 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_DEFAULT); 1387 } 1388 getStreamConfigurationMap()1389 private StreamConfigurationMap getStreamConfigurationMap() { 1390 StreamConfiguration[] configurations = getBase( 1391 CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS); 1392 StreamConfigurationDuration[] minFrameDurations = getBase( 1393 
CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS); 1394 StreamConfigurationDuration[] stallDurations = getBase( 1395 CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS); 1396 StreamConfiguration[] depthConfigurations = getBase( 1397 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS); 1398 StreamConfigurationDuration[] depthMinFrameDurations = getBase( 1399 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS); 1400 StreamConfigurationDuration[] depthStallDurations = getBase( 1401 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS); 1402 StreamConfiguration[] dynamicDepthConfigurations = getBase( 1403 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS); 1404 StreamConfigurationDuration[] dynamicDepthMinFrameDurations = getBase( 1405 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS); 1406 StreamConfigurationDuration[] dynamicDepthStallDurations = getBase( 1407 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS); 1408 StreamConfiguration[] heicConfigurations = getBase( 1409 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS); 1410 StreamConfigurationDuration[] heicMinFrameDurations = getBase( 1411 CameraCharacteristics.HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS); 1412 StreamConfigurationDuration[] heicStallDurations = getBase( 1413 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STALL_DURATIONS); 1414 HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase( 1415 CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS); 1416 ReprocessFormatsMap inputOutputFormatsMap = getBase( 1417 CameraCharacteristics.SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP); 1418 boolean listHighResolution = isBurstSupported(); 1419 return new StreamConfigurationMap( 1420 configurations, minFrameDurations, stallDurations, 1421 depthConfigurations, depthMinFrameDurations, depthStallDurations, 1422 dynamicDepthConfigurations, dynamicDepthMinFrameDurations, 1423 dynamicDepthStallDurations, heicConfigurations, 1424 heicMinFrameDurations, heicStallDurations, 1425 highSpeedVideoConfigurations, inputOutputFormatsMap, 1426 listHighResolution); 1427 } 1428 getStreamConfigurationMapMaximumResolution()1429 private StreamConfigurationMap getStreamConfigurationMapMaximumResolution() { 1430 if (!isUltraHighResolutionSensor()) { 1431 return null; 1432 } 1433 StreamConfiguration[] configurations = getBase( 1434 CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1435 StreamConfigurationDuration[] minFrameDurations = getBase( 1436 CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1437 StreamConfigurationDuration[] stallDurations = getBase( 1438 CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1439 StreamConfiguration[] depthConfigurations = getBase( 1440 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1441 StreamConfigurationDuration[] depthMinFrameDurations = getBase( 1442 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1443 StreamConfigurationDuration[] depthStallDurations = getBase( 1444 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1445 StreamConfiguration[] dynamicDepthConfigurations = getBase( 1446 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1447 StreamConfigurationDuration[] dynamicDepthMinFrameDurations = getBase( 1448 
CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1449 StreamConfigurationDuration[] dynamicDepthStallDurations = getBase( 1450 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1451 StreamConfiguration[] heicConfigurations = getBase( 1452 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1453 StreamConfigurationDuration[] heicMinFrameDurations = getBase( 1454 CameraCharacteristics.HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1455 StreamConfigurationDuration[] heicStallDurations = getBase( 1456 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1457 HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase( 1458 CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1459 ReprocessFormatsMap inputOutputFormatsMap = getBase( 1460 CameraCharacteristics.SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION); 1461 // TODO: Is this correct, burst capability shouldn't necessarily correspond to max res mode 1462 boolean listHighResolution = isBurstSupported(); 1463 return new StreamConfigurationMap( 1464 configurations, minFrameDurations, stallDurations, 1465 depthConfigurations, depthMinFrameDurations, depthStallDurations, 1466 dynamicDepthConfigurations, dynamicDepthMinFrameDurations, 1467 dynamicDepthStallDurations, heicConfigurations, 1468 heicMinFrameDurations, heicStallDurations, 1469 highSpeedVideoConfigurations, inputOutputFormatsMap, 1470 listHighResolution, false); 1471 } 1472 getMaxRegions(Key<T> key)1473 private <T> Integer getMaxRegions(Key<T> key) { 1474 final int AE = 0; 1475 final int AWB = 1; 1476 final int AF = 2; 1477 1478 // The order of the elements is: (AE, AWB, AF) 1479 int[] maxRegions = getBase(CameraCharacteristics.CONTROL_MAX_REGIONS); 1480 1481 if (maxRegions == null) { 1482 return null; 1483 } 1484 1485 if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AE)) { 1486 return maxRegions[AE]; 1487 } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB)) { 1488 return maxRegions[AWB]; 1489 } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AF)) { 1490 return maxRegions[AF]; 1491 } else { 1492 throw new AssertionError("Invalid key " + key); 1493 } 1494 } 1495 getMaxNumOutputs(Key<T> key)1496 private <T> Integer getMaxNumOutputs(Key<T> key) { 1497 final int RAW = 0; 1498 final int PROC = 1; 1499 final int PROC_STALLING = 2; 1500 1501 // The order of the elements is: (raw, proc+nonstalling, proc+stalling) 1502 int[] maxNumOutputs = getBase(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS); 1503 1504 if (maxNumOutputs == null) { 1505 return null; 1506 } 1507 1508 if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW)) { 1509 return maxNumOutputs[RAW]; 1510 } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC)) { 1511 return maxNumOutputs[PROC]; 1512 } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING)) { 1513 return maxNumOutputs[PROC_STALLING]; 1514 } else { 1515 throw new AssertionError("Invalid key " + key); 1516 } 1517 } 1518 getTonemapCurve()1519 private <T> TonemapCurve getTonemapCurve() { 1520 float[] red = getBase(CaptureRequest.TONEMAP_CURVE_RED); 1521 float[] green = getBase(CaptureRequest.TONEMAP_CURVE_GREEN); 1522 float[] blue = getBase(CaptureRequest.TONEMAP_CURVE_BLUE); 1523 1524 if (areValuesAllNull(red, green, blue)) { 1525 return null; 1526 } 1527 1528 if 
(red == null || green == null || blue == null) { 1529 Log.w(TAG, "getTonemapCurve - missing tone curve components"); 1530 return null; 1531 } 1532 TonemapCurve tc = new TonemapCurve(red, green, blue); 1533 return tc; 1534 } 1535 getOisSamples()1536 private OisSample[] getOisSamples() { 1537 long[] timestamps = getBase(CaptureResult.STATISTICS_OIS_TIMESTAMPS); 1538 float[] xShifts = getBase(CaptureResult.STATISTICS_OIS_X_SHIFTS); 1539 float[] yShifts = getBase(CaptureResult.STATISTICS_OIS_Y_SHIFTS); 1540 1541 if (timestamps == null) { 1542 if (xShifts != null) { 1543 throw new AssertionError("timestamps is null but xShifts is not"); 1544 } 1545 1546 if (yShifts != null) { 1547 throw new AssertionError("timestamps is null but yShifts is not"); 1548 } 1549 1550 return null; 1551 } 1552 1553 if (xShifts == null) { 1554 throw new AssertionError("timestamps is not null but xShifts is"); 1555 } 1556 1557 if (yShifts == null) { 1558 throw new AssertionError("timestamps is not null but yShifts is"); 1559 } 1560 1561 if (xShifts.length != timestamps.length) { 1562 throw new AssertionError(String.format( 1563 "timestamps has %d entries but xShifts has %d", timestamps.length, 1564 xShifts.length)); 1565 } 1566 1567 if (yShifts.length != timestamps.length) { 1568 throw new AssertionError(String.format( 1569 "timestamps has %d entries but yShifts has %d", timestamps.length, 1570 yShifts.length)); 1571 } 1572 1573 OisSample[] samples = new OisSample[timestamps.length]; 1574 for (int i = 0; i < timestamps.length; i++) { 1575 samples[i] = new OisSample(timestamps[i], xShifts[i], yShifts[i]); 1576 } 1577 return samples; 1578 } 1579 getExtendedSceneModeCapabilities()1580 private Capability[] getExtendedSceneModeCapabilities() { 1581 int[] maxSizes = 1582 getBase(CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES); 1583 float[] zoomRanges = getBase( 1584 CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES); 1585 Range<Float> zoomRange = getBase(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE); 1586 float maxDigitalZoom = getBase(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM); 1587 1588 if (maxSizes == null) { 1589 return null; 1590 } 1591 if (maxSizes.length % 3 != 0) { 1592 throw new AssertionError("availableExtendedSceneModeMaxSizes must be tuples of " 1593 + "[mode, width, height]"); 1594 } 1595 int numExtendedSceneModes = maxSizes.length / 3; 1596 int numExtendedSceneModeZoomRanges = 0; 1597 if (zoomRanges != null) { 1598 if (zoomRanges.length % 2 != 0) { 1599 throw new AssertionError("availableExtendedSceneModeZoomRanges must be tuples of " 1600 + "[minZoom, maxZoom]"); 1601 } 1602 numExtendedSceneModeZoomRanges = zoomRanges.length / 2; 1603 if (numExtendedSceneModes - numExtendedSceneModeZoomRanges != 1) { 1604 throw new AssertionError("Number of extended scene mode zoom ranges must be 1 " 1605 + "less than number of supported modes"); 1606 } 1607 } 1608 1609 float modeOffMinZoomRatio = 1.0f; 1610 float modeOffMaxZoomRatio = maxDigitalZoom; 1611 if (zoomRange != null) { 1612 modeOffMinZoomRatio = zoomRange.getLower(); 1613 modeOffMaxZoomRatio = zoomRange.getUpper(); 1614 } 1615 1616 Capability[] capabilities = new Capability[numExtendedSceneModes]; 1617 for (int i = 0, j = 0; i < numExtendedSceneModes; i++) { 1618 int mode = maxSizes[3 * i]; 1619 int width = maxSizes[3 * i + 1]; 1620 int height = maxSizes[3 * i + 2]; 1621 if (mode != CameraMetadata.CONTROL_EXTENDED_SCENE_MODE_DISABLED 1622 && j < numExtendedSceneModeZoomRanges) { 1623 capabilities[i] 
= new Capability(mode, width, height, zoomRanges[2 * j], 1624 zoomRanges[2 * j + 1]); 1625 j++; 1626 } else { 1627 capabilities[i] = new Capability(mode, width, height, modeOffMinZoomRatio, 1628 modeOffMaxZoomRatio); 1629 } 1630 } 1631 1632 return capabilities; 1633 } 1634 setBase(CameraCharacteristics.Key<T> key, T value)1635 private <T> void setBase(CameraCharacteristics.Key<T> key, T value) { 1636 setBase(key.getNativeKey(), value); 1637 } 1638 setBase(CaptureResult.Key<T> key, T value)1639 private <T> void setBase(CaptureResult.Key<T> key, T value) { 1640 setBase(key.getNativeKey(), value); 1641 } 1642 setBase(CaptureRequest.Key<T> key, T value)1643 private <T> void setBase(CaptureRequest.Key<T> key, T value) { 1644 setBase(key.getNativeKey(), value); 1645 } 1646 setBase(Key<T> key, T value)1647 private <T> void setBase(Key<T> key, T value) { 1648 int tag; 1649 if (key.hasTag()) { 1650 tag = key.getTag(); 1651 } else { 1652 tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.getName()); 1653 key.cacheTag(tag); 1654 } 1655 if (value == null) { 1656 // Erase the entry 1657 writeValues(tag, /*src*/null); 1658 return; 1659 } // else update the entry to a new value 1660 1661 int nativeType = nativeGetTypeFromTagLocal(mMetadataPtr, tag); 1662 Marshaler<T> marshaler = getMarshalerForKey(key, nativeType); 1663 int size = marshaler.calculateMarshalSize(value); 1664 1665 // TODO: Optimization. Cache the byte[] and reuse if the size is big enough. 1666 byte[] values = new byte[size]; 1667 1668 ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder()); 1669 marshaler.marshal(value, buffer); 1670 1671 writeValues(tag, values); 1672 } 1673 1674 // Use Command pattern here to avoid lots of expensive if/equals checks in get for overridden 1675 // metadata. 
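    // Illustrative sketch (assumed, simplified; the actual dispatch lives in this class's set()
    // methods, which are not part of this excerpt): a set() call for one of the overridden keys
    // is routed through this map roughly as
    //
    //     SetCommand command = sSetCommandMap.get(key);
    //     if (command != null) {
    //         command.setValue(this, value);  // e.g. CONTROL_AF_REGIONS routes to setAFRegions()
    //     } else {
    //         setBase(key, value);            // default path for keys without an override
    //     }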
1676 private static final HashMap<Key<?>, SetCommand> sSetCommandMap = 1677 new HashMap<Key<?>, SetCommand>(); 1678 static { CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey()1679 sSetCommandMap.put(CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey(), 1680 new SetCommand() { 1681 @Override 1682 public <T> void setValue(CameraMetadataNative metadata, T value) { 1683 metadata.setAvailableFormats((int[]) value); 1684 } 1685 }); CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey()1686 sSetCommandMap.put(CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey(), 1687 new SetCommand() { 1688 @Override 1689 public <T> void setValue(CameraMetadataNative metadata, T value) { 1690 metadata.setFaceRectangles((Rect[]) value); 1691 } 1692 }); CaptureResult.STATISTICS_FACES.getNativeKey()1693 sSetCommandMap.put(CaptureResult.STATISTICS_FACES.getNativeKey(), 1694 new SetCommand() { 1695 @Override 1696 public <T> void setValue(CameraMetadataNative metadata, T value) { 1697 metadata.setFaces((Face[])value); 1698 } 1699 }); CaptureRequest.TONEMAP_CURVE.getNativeKey()1700 sSetCommandMap.put(CaptureRequest.TONEMAP_CURVE.getNativeKey(), new SetCommand() { 1701 @Override 1702 public <T> void setValue(CameraMetadataNative metadata, T value) { 1703 metadata.setTonemapCurve((TonemapCurve) value); 1704 } 1705 }); CaptureResult.JPEG_GPS_LOCATION.getNativeKey()1706 sSetCommandMap.put(CaptureResult.JPEG_GPS_LOCATION.getNativeKey(), new SetCommand() { 1707 @Override 1708 public <T> void setValue(CameraMetadataNative metadata, T value) { 1709 metadata.setGpsLocation((Location) value); 1710 } 1711 }); CaptureRequest.SCALER_CROP_REGION.getNativeKey()1712 sSetCommandMap.put(CaptureRequest.SCALER_CROP_REGION.getNativeKey(), 1713 new SetCommand() { 1714 @Override 1715 public <T> void setValue(CameraMetadataNative metadata, T value) { 1716 metadata.setScalerCropRegion((Rect) value); 1717 } 1718 }); CaptureRequest.CONTROL_AWB_REGIONS.getNativeKey()1719 sSetCommandMap.put(CaptureRequest.CONTROL_AWB_REGIONS.getNativeKey(), 1720 new SetCommand() { 1721 @Override 1722 public <T> void setValue(CameraMetadataNative metadata, T value) { 1723 metadata.setAWBRegions(value); 1724 } 1725 }); CaptureRequest.CONTROL_AF_REGIONS.getNativeKey()1726 sSetCommandMap.put(CaptureRequest.CONTROL_AF_REGIONS.getNativeKey(), 1727 new SetCommand() { 1728 @Override 1729 public <T> void setValue(CameraMetadataNative metadata, T value) { 1730 metadata.setAFRegions(value); 1731 } 1732 }); CaptureRequest.CONTROL_AE_REGIONS.getNativeKey()1733 sSetCommandMap.put(CaptureRequest.CONTROL_AE_REGIONS.getNativeKey(), 1734 new SetCommand() { 1735 @Override 1736 public <T> void setValue(CameraMetadataNative metadata, T value) { 1737 metadata.setAERegions(value); 1738 } 1739 }); 1740 } 1741 setAvailableFormats(int[] value)1742 private boolean setAvailableFormats(int[] value) { 1743 int[] availableFormat = value; 1744 if (value == null) { 1745 // Let setBase() to handle the null value case. 1746 return false; 1747 } 1748 1749 int[] newValues = new int[availableFormat.length]; 1750 for (int i = 0; i < availableFormat.length; i++) { 1751 newValues[i] = availableFormat[i]; 1752 if (availableFormat[i] == ImageFormat.JPEG) { 1753 newValues[i] = NATIVE_JPEG_FORMAT; 1754 } 1755 } 1756 1757 setBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS, newValues); 1758 return true; 1759 } 1760 1761 /** 1762 * Convert Face Rectangles from managed side to native side as they have different definitions. 
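 * <p>For example (illustrative values, using the definitions in the next paragraph): a managed-side
 * rectangle of {@code left=100, top=50, width=200, height=100} is re-encoded on the native side as
 * {@code left=100, top=50, right=300, bottom=150}.</p>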
1763 * <p> 1764 * Managed side face rectangles are defined as: left, top, width, height. 1765 * Native side face rectangles are defined as: left, top, right, bottom. 1766 * The input face rectangle need to be converted to native side definition when set is called. 1767 * </p> 1768 * 1769 * @param faceRects Input face rectangles. 1770 * @return true if face rectangles can be set successfully. Otherwise, Let the caller 1771 * (setBase) to handle it appropriately. 1772 */ setFaceRectangles(Rect[] faceRects)1773 private boolean setFaceRectangles(Rect[] faceRects) { 1774 if (faceRects == null) { 1775 return false; 1776 } 1777 1778 Rect[] newFaceRects = new Rect[faceRects.length]; 1779 for (int i = 0; i < newFaceRects.length; i++) { 1780 newFaceRects[i] = new Rect( 1781 faceRects[i].left, 1782 faceRects[i].top, 1783 faceRects[i].right + faceRects[i].left, 1784 faceRects[i].bottom + faceRects[i].top); 1785 } 1786 1787 setBase(CaptureResult.STATISTICS_FACE_RECTANGLES, newFaceRects); 1788 return true; 1789 } 1790 setTonemapCurve(TonemapCurve tc)1791 private <T> boolean setTonemapCurve(TonemapCurve tc) { 1792 if (tc == null) { 1793 return false; 1794 } 1795 1796 float[][] curve = new float[3][]; 1797 for (int i = TonemapCurve.CHANNEL_RED; i <= TonemapCurve.CHANNEL_BLUE; i++) { 1798 int pointCount = tc.getPointCount(i); 1799 curve[i] = new float[pointCount * TonemapCurve.POINT_SIZE]; 1800 tc.copyColorCurve(i, curve[i], 0); 1801 } 1802 setBase(CaptureRequest.TONEMAP_CURVE_RED, curve[0]); 1803 setBase(CaptureRequest.TONEMAP_CURVE_GREEN, curve[1]); 1804 setBase(CaptureRequest.TONEMAP_CURVE_BLUE, curve[2]); 1805 1806 return true; 1807 } 1808 setScalerCropRegion(Rect cropRegion)1809 private <T> boolean setScalerCropRegion(Rect cropRegion) { 1810 if (cropRegion == null) { 1811 return false; 1812 } 1813 setBase(CaptureRequest.SCALER_CROP_REGION_SET, true); 1814 setBase(CaptureRequest.SCALER_CROP_REGION, cropRegion); 1815 return true; 1816 } 1817 setAFRegions(T afRegions)1818 private <T> boolean setAFRegions(T afRegions) { 1819 if (afRegions == null) { 1820 return false; 1821 } 1822 setBase(CaptureRequest.CONTROL_AF_REGIONS_SET, true); 1823 // The cast to CaptureRequest.Key is needed since java does not support template 1824 // specialization and we need to route this method to 1825 // setBase(CaptureRequest.Key<T> key, T value) 1826 setBase((CaptureRequest.Key)CaptureRequest.CONTROL_AF_REGIONS, afRegions); 1827 return true; 1828 } 1829 setAERegions(T aeRegions)1830 private <T> boolean setAERegions(T aeRegions) { 1831 if (aeRegions == null) { 1832 return false; 1833 } 1834 setBase(CaptureRequest.CONTROL_AE_REGIONS_SET, true); 1835 setBase((CaptureRequest.Key)CaptureRequest.CONTROL_AE_REGIONS, aeRegions); 1836 return true; 1837 } 1838 setAWBRegions(T awbRegions)1839 private <T> boolean setAWBRegions(T awbRegions) { 1840 if (awbRegions == null) { 1841 return false; 1842 } 1843 setBase(CaptureRequest.CONTROL_AWB_REGIONS_SET, true); 1844 setBase((CaptureRequest.Key)CaptureRequest.CONTROL_AWB_REGIONS, awbRegions); 1845 return true; 1846 } 1847 updateNativeAllocation()1848 private void updateNativeAllocation() { 1849 long currentBufferSize = nativeGetBufferSize(mMetadataPtr); 1850 1851 if (currentBufferSize != mBufferSize) { 1852 if (mBufferSize > 0) { 1853 VMRuntime.getRuntime().registerNativeFree(mBufferSize); 1854 } 1855 1856 mBufferSize = currentBufferSize; 1857 1858 if (mBufferSize > 0) { 1859 VMRuntime.getRuntime().registerNativeAllocation(mBufferSize); 1860 } 1861 } 1862 } 1863 1864 private int mCameraId = 
-1;
    private boolean mHasMandatoryConcurrentStreams = false;
    private Size mDisplaySize = new Size(0, 0);
    private long mBufferSize = 0;
    private MultiResolutionStreamConfigurationMap mMultiResolutionStreamConfigurationMap = null;

    /**
     * Set the current camera Id.
     *
     * @param cameraId Current camera id.
     *
     * @hide
     */
    public void setCameraId(int cameraId) {
        mCameraId = cameraId;
    }

    /**
     * Set whether the metadata advertises mandatory concurrent streams.
     *
     * @param hasMandatoryConcurrentStreams whether the metadata advertises mandatory concurrent
     *                                      streams.
     *
     * @hide
     */
    public void setHasMandatoryConcurrentStreams(boolean hasMandatoryConcurrentStreams) {
        mHasMandatoryConcurrentStreams = hasMandatoryConcurrentStreams;
    }

    /**
     * Set the current display size.
     *
     * @param displaySize The current display size.
     *
     * @hide
     */
    public void setDisplaySize(Size displaySize) {
        mDisplaySize = displaySize;
    }

    /**
     * Set the multi-resolution stream configuration map.
     *
     * @param multiResolutionMap The multi-resolution stream configuration map.
     *
     * @hide
     */
    public void setMultiResolutionStreamConfigurationMap(
            @NonNull Map<String, StreamConfiguration[]> multiResolutionMap) {
        mMultiResolutionStreamConfigurationMap =
                new MultiResolutionStreamConfigurationMap(multiResolutionMap);
    }

    /**
     * Get the multi-resolution stream configuration map.
     *
     * @return The multi-resolution stream configuration map.
1921 * 1922 * @hide 1923 */ getMultiResolutionStreamConfigurationMap()1924 public MultiResolutionStreamConfigurationMap getMultiResolutionStreamConfigurationMap() { 1925 return mMultiResolutionStreamConfigurationMap; 1926 } 1927 1928 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) 1929 private long mMetadataPtr; // native std::shared_ptr<CameraMetadata>* 1930 1931 @FastNative nativeAllocate()1932 private static native long nativeAllocate(); 1933 @FastNative nativeAllocateCopy(long ptr)1934 private static native long nativeAllocateCopy(long ptr) 1935 throws NullPointerException; 1936 1937 1938 @FastNative nativeUpdate(long dst, long src)1939 private static native void nativeUpdate(long dst, long src); nativeWriteToParcel(Parcel dest, long ptr)1940 private static synchronized native void nativeWriteToParcel(Parcel dest, long ptr); nativeReadFromParcel(Parcel source, long ptr)1941 private static synchronized native void nativeReadFromParcel(Parcel source, long ptr); nativeSwap(long ptr, long otherPtr)1942 private static synchronized native void nativeSwap(long ptr, long otherPtr) 1943 throws NullPointerException; 1944 @FastNative nativeSetVendorId(long ptr, long vendorId)1945 private static native void nativeSetVendorId(long ptr, long vendorId); nativeClose(long ptr)1946 private static synchronized native void nativeClose(long ptr); nativeIsEmpty(long ptr)1947 private static synchronized native boolean nativeIsEmpty(long ptr); nativeGetEntryCount(long ptr)1948 private static synchronized native int nativeGetEntryCount(long ptr); nativeGetBufferSize(long ptr)1949 private static synchronized native long nativeGetBufferSize(long ptr); 1950 1951 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) nativeReadValues(int tag, long ptr)1952 private static synchronized native byte[] nativeReadValues(int tag, long ptr); nativeWriteValues(int tag, byte[] src, long ptr)1953 private static synchronized native void nativeWriteValues(int tag, byte[] src, long ptr); nativeDump(long ptr)1954 private static synchronized native void nativeDump(long ptr) throws IOException; // dump to LOGD 1955 nativeGetAllVendorKeys(long ptr, Class keyClass)1956 private static synchronized native ArrayList nativeGetAllVendorKeys(long ptr, Class keyClass); 1957 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) nativeGetTagFromKeyLocal(long ptr, String keyName)1958 private static synchronized native int nativeGetTagFromKeyLocal(long ptr, String keyName) 1959 throws IllegalArgumentException; 1960 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) nativeGetTypeFromTagLocal(long ptr, int tag)1961 private static synchronized native int nativeGetTypeFromTagLocal(long ptr, int tag) 1962 throws IllegalArgumentException; 1963 @FastNative nativeGetTagFromKey(String keyName, long vendorId)1964 private static native int nativeGetTagFromKey(String keyName, long vendorId) 1965 throws IllegalArgumentException; 1966 @FastNative nativeGetTypeFromTag(int tag, long vendorId)1967 private static native int nativeGetTypeFromTag(int tag, long vendorId) 1968 throws IllegalArgumentException; 1969 1970 /** 1971 * <p>Perform a 0-copy swap of the internal metadata with another object.</p> 1972 * 1973 * <p>Useful to convert a CameraMetadata into e.g. 
a CaptureRequest.</p> 1974 * 1975 * @param other Metadata to swap with 1976 * @throws NullPointerException if other was null 1977 * @hide 1978 */ swap(CameraMetadataNative other)1979 public void swap(CameraMetadataNative other) { 1980 nativeSwap(mMetadataPtr, other.mMetadataPtr); 1981 mCameraId = other.mCameraId; 1982 mHasMandatoryConcurrentStreams = other.mHasMandatoryConcurrentStreams; 1983 mDisplaySize = other.mDisplaySize; 1984 mMultiResolutionStreamConfigurationMap = other.mMultiResolutionStreamConfigurationMap; 1985 updateNativeAllocation(); 1986 other.updateNativeAllocation(); 1987 } 1988 1989 /** 1990 * Set the native metadata vendor id. 1991 * 1992 * @hide 1993 */ setVendorId(long vendorId)1994 public void setVendorId(long vendorId) { 1995 nativeSetVendorId(mMetadataPtr, vendorId); 1996 } 1997 1998 /** 1999 * @hide 2000 */ getEntryCount()2001 public int getEntryCount() { 2002 return nativeGetEntryCount(mMetadataPtr); 2003 } 2004 2005 /** 2006 * Does this metadata contain at least 1 entry? 2007 * 2008 * @hide 2009 */ isEmpty()2010 public boolean isEmpty() { 2011 return nativeIsEmpty(mMetadataPtr); 2012 } 2013 2014 2015 /** 2016 * Retrieves the pointer to the native shared_ptr<CameraMetadata> as a Java long. 2017 * 2018 * @hide 2019 */ getMetadataPtr()2020 public long getMetadataPtr() { 2021 return mMetadataPtr; 2022 } 2023 2024 /** 2025 * Return a list containing keys of the given key class for all defined vendor tags. 2026 * 2027 * @hide 2028 */ getAllVendorKeys(Class<K> keyClass)2029 public <K> ArrayList<K> getAllVendorKeys(Class<K> keyClass) { 2030 if (keyClass == null) { 2031 throw new NullPointerException(); 2032 } 2033 return (ArrayList<K>) nativeGetAllVendorKeys(mMetadataPtr, keyClass); 2034 } 2035 2036 /** 2037 * Convert a key string into the equivalent native tag. 2038 * 2039 * @throws IllegalArgumentException if the key was not recognized 2040 * @throws NullPointerException if the key was null 2041 * 2042 * @hide 2043 */ getTag(String key)2044 public static int getTag(String key) { 2045 return nativeGetTagFromKey(key, Long.MAX_VALUE); 2046 } 2047 2048 /** 2049 * Convert a key string into the equivalent native tag. 2050 * 2051 * @throws IllegalArgumentException if the key was not recognized 2052 * @throws NullPointerException if the key was null 2053 * 2054 * @hide 2055 */ getTag(String key, long vendorId)2056 public static int getTag(String key, long vendorId) { 2057 return nativeGetTagFromKey(key, vendorId); 2058 } 2059 2060 /** 2061 * Get the underlying native type for a tag. 2062 * 2063 * @param tag An integer tag, see e.g. {@link #getTag} 2064 * @param vendorId A vendor tag provider id 2065 * @return An int enum for the metadata type, see e.g. {@link #TYPE_BYTE} 2066 * 2067 * @hide 2068 */ getNativeType(int tag, long vendorId)2069 public static int getNativeType(int tag, long vendorId) { 2070 return nativeGetTypeFromTag(tag, vendorId); 2071 } 2072 2073 /** 2074 * <p>Updates the existing entry for tag with the new bytes pointed by src, erasing 2075 * the entry if src was null.</p> 2076 * 2077 * <p>An empty array can be passed in to update the entry to 0 elements.</p> 2078 * 2079 * @param tag An integer tag, see e.g. {@link #getTag} 2080 * @param src An array of bytes, or null to erase the entry 2081 * 2082 * @hide 2083 */ writeValues(int tag, byte[] src)2084 public void writeValues(int tag, byte[] src) { 2085 nativeWriteValues(tag, src, mMetadataPtr); 2086 } 2087 2088 /** 2089 * <p>Returns a byte[] of data corresponding to this tag. 
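 * For instance, a caller would typically wrap the returned array as
 * {@code ByteBuffer.wrap(readValues(tag)).order(ByteOrder.nativeOrder())} before unmarshaling,
 * mirroring how {@code setBase} marshals values into native byte order (illustrative).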
Use a wrapped bytebuffer to unserialize 2090 * the data properly.</p> 2091 * 2092 * <p>An empty array can be returned to denote an existing entry with 0 elements.</p> 2093 * 2094 * @param tag An integer tag, see e.g. {@link #getTag} 2095 * 2096 * @return {@code null} if there were 0 entries for this tag, a byte[] otherwise. 2097 * @hide 2098 */ readValues(int tag)2099 public byte[] readValues(int tag) { 2100 // TODO: Optimization. Native code returns a ByteBuffer instead. 2101 return nativeReadValues(tag, mMetadataPtr); 2102 } 2103 2104 /** 2105 * Dumps the native metadata contents to logcat. 2106 * 2107 * <p>Visibility for testing/debugging only. The results will not 2108 * include any synthesized keys, as they are invisible to the native layer.</p> 2109 * 2110 * @hide 2111 */ dumpToLog()2112 public void dumpToLog() { 2113 try { 2114 nativeDump(mMetadataPtr); 2115 } catch (IOException e) { 2116 Log.wtf(TAG, "Dump logging failed", e); 2117 } 2118 } 2119 2120 @Override finalize()2121 protected void finalize() throws Throwable { 2122 try { 2123 close(); 2124 } finally { 2125 super.finalize(); 2126 } 2127 } 2128 2129 /** 2130 * Get the marshaler compatible with the {@code key} and type {@code T}. 2131 * 2132 * @throws UnsupportedOperationException 2133 * if the native/managed type combination for {@code key} is not supported 2134 */ getMarshalerForKey(Key<T> key, int nativeType)2135 private static <T> Marshaler<T> getMarshalerForKey(Key<T> key, int nativeType) { 2136 return MarshalRegistry.getMarshaler(key.getTypeReference(), 2137 nativeType); 2138 } 2139 2140 @SuppressWarnings({ "unchecked", "rawtypes" }) registerAllMarshalers()2141 private static void registerAllMarshalers() { 2142 if (DEBUG) { 2143 Log.v(TAG, "Shall register metadata marshalers"); 2144 } 2145 2146 MarshalQueryable[] queryList = new MarshalQueryable[] { 2147 // marshalers for standard types 2148 new MarshalQueryablePrimitive(), 2149 new MarshalQueryableEnum(), 2150 new MarshalQueryableArray(), 2151 2152 // pseudo standard types, that expand/narrow the native type into a managed type 2153 new MarshalQueryableBoolean(), 2154 new MarshalQueryableNativeByteToInteger(), 2155 2156 // marshalers for custom types 2157 new MarshalQueryableRect(), 2158 new MarshalQueryableSize(), 2159 new MarshalQueryableSizeF(), 2160 new MarshalQueryableString(), 2161 new MarshalQueryableReprocessFormatsMap(), 2162 new MarshalQueryableRange(), 2163 new MarshalQueryablePair(), 2164 new MarshalQueryableMeteringRectangle(), 2165 new MarshalQueryableColorSpaceTransform(), 2166 new MarshalQueryableStreamConfiguration(), 2167 new MarshalQueryableStreamConfigurationDuration(), 2168 new MarshalQueryableRggbChannelVector(), 2169 new MarshalQueryableBlackLevelPattern(), 2170 new MarshalQueryableHighSpeedVideoConfiguration(), 2171 new MarshalQueryableRecommendedStreamConfiguration(), 2172 2173 // generic parcelable marshaler (MUST BE LAST since it has lowest priority) 2174 new MarshalQueryableParcelable(), 2175 }; 2176 2177 for (MarshalQueryable query : queryList) { 2178 MarshalRegistry.registerMarshalQueryable(query); 2179 } 2180 if (DEBUG) { 2181 Log.v(TAG, "Registered metadata marshalers"); 2182 } 2183 } 2184 2185 /** Check if input arguments are all {@code null}. 2186 * 2187 * @param objs Input arguments for null check 2188 * @return {@code true} if input arguments are all {@code null}, otherwise {@code false} 2189 */ areValuesAllNull(Object... objs)2190 private static boolean areValuesAllNull(Object... 
objs) {
        for (Object o : objs) {
            if (o != null) return false;
        }
        return true;
    }

    /**
     * Return the set of physical camera ids that this logical {@link CameraDevice} is made
     * up of.
     *
     * If the camera device isn't a logical camera, return an empty set.
     *
     * @hide
     */
    public Set<String> getPhysicalCameraIds() {
        int[] availableCapabilities = get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
        if (availableCapabilities == null) {
            throw new AssertionError("android.request.availableCapabilities must be non-null "
                    + "in the characteristics");
        }

        if (!ArrayUtils.contains(availableCapabilities,
                CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA)) {
            return Collections.emptySet();
        }
        byte[] physicalCamIds = get(CameraCharacteristics.LOGICAL_MULTI_CAMERA_PHYSICAL_IDS);

        String physicalCamIdString = null;
        try {
            physicalCamIdString = new String(physicalCamIds, "UTF-8");
        } catch (java.io.UnsupportedEncodingException e) {
            throw new AssertionError("android.logicalCam.physicalIds must be UTF-8 string");
        }
        String[] physicalCameraIdArray = physicalCamIdString.split("\0");

        return Collections.unmodifiableSet(
                new HashSet<String>(Arrays.asList(physicalCameraIdArray)));
    }

    static {
        registerAllMarshalers();
    }
}
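// Illustrative usage sketch (assumed, simplified; not part of the class above): the synthetic keys
// assembled by this class surface through the public camera2 API roughly as follows, where
// "context" and "cameraId" are placeholders supplied by the caller.
//
//     CameraManager manager =
//             (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
//     CameraCharacteristics chars = manager.getCameraCharacteristics(cameraId);
//
//     // Backed by getMaxRegions(): unpacks android.control.maxRegions into per-routine counts.
//     Integer maxAfRegions = chars.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
//
//     // Backed by getStreamConfigurationMap(): combines the configuration/duration arrays above.
//     StreamConfigurationMap map =
//             chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
//
//     // Backed by getPhysicalCameraIds() for logical multi-cameras.
//     Set<String> physicalIds = chars.getPhysicalCameraIds();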