1 /* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package android.hardware.camera2.impl; 18 19 import android.annotation.NonNull; 20 import android.compat.annotation.UnsupportedAppUsage; 21 import android.graphics.ImageFormat; 22 import android.graphics.Point; 23 import android.graphics.Rect; 24 import android.hardware.camera2.CameraCharacteristics; 25 import android.hardware.camera2.CameraMetadata; 26 import android.hardware.camera2.CaptureRequest; 27 import android.hardware.camera2.CaptureResult; 28 import android.hardware.camera2.marshal.MarshalQueryable; 29 import android.hardware.camera2.marshal.MarshalRegistry; 30 import android.hardware.camera2.marshal.Marshaler; 31 import android.hardware.camera2.marshal.impl.MarshalQueryableArray; 32 import android.hardware.camera2.marshal.impl.MarshalQueryableBlackLevelPattern; 33 import android.hardware.camera2.marshal.impl.MarshalQueryableBoolean; 34 import android.hardware.camera2.marshal.impl.MarshalQueryableColorSpaceTransform; 35 import android.hardware.camera2.marshal.impl.MarshalQueryableEnum; 36 import android.hardware.camera2.marshal.impl.MarshalQueryableHighSpeedVideoConfiguration; 37 import android.hardware.camera2.marshal.impl.MarshalQueryableMeteringRectangle; 38 import android.hardware.camera2.marshal.impl.MarshalQueryableNativeByteToInteger; 39 import android.hardware.camera2.marshal.impl.MarshalQueryablePair; 40 import 
android.hardware.camera2.marshal.impl.MarshalQueryableParcelable; 41 import android.hardware.camera2.marshal.impl.MarshalQueryablePrimitive; 42 import android.hardware.camera2.marshal.impl.MarshalQueryableRange; 43 import android.hardware.camera2.marshal.impl.MarshalQueryableRecommendedStreamConfiguration; 44 import android.hardware.camera2.marshal.impl.MarshalQueryableRect; 45 import android.hardware.camera2.marshal.impl.MarshalQueryableReprocessFormatsMap; 46 import android.hardware.camera2.marshal.impl.MarshalQueryableRggbChannelVector; 47 import android.hardware.camera2.marshal.impl.MarshalQueryableSize; 48 import android.hardware.camera2.marshal.impl.MarshalQueryableSizeF; 49 import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfiguration; 50 import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfigurationDuration; 51 import android.hardware.camera2.marshal.impl.MarshalQueryableString; 52 import android.hardware.camera2.params.Capability; 53 import android.hardware.camera2.params.ColorSpaceProfiles; 54 import android.hardware.camera2.params.DeviceStateSensorOrientationMap; 55 import android.hardware.camera2.params.DynamicRangeProfiles; 56 import android.hardware.camera2.params.Face; 57 import android.hardware.camera2.params.HighSpeedVideoConfiguration; 58 import android.hardware.camera2.params.LensIntrinsicsSample; 59 import android.hardware.camera2.params.LensShadingMap; 60 import android.hardware.camera2.params.MandatoryStreamCombination; 61 import android.hardware.camera2.params.MultiResolutionStreamConfigurationMap; 62 import android.hardware.camera2.params.OisSample; 63 import android.hardware.camera2.params.RecommendedStreamConfiguration; 64 import android.hardware.camera2.params.RecommendedStreamConfigurationMap; 65 import android.hardware.camera2.params.ReprocessFormatsMap; 66 import android.hardware.camera2.params.SharedSessionConfiguration; 67 import android.hardware.camera2.params.StreamConfiguration; 68 import 
android.hardware.camera2.params.StreamConfigurationDuration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.hardware.camera2.params.TonemapCurve;
import android.hardware.camera2.utils.ArrayUtils;
import android.hardware.camera2.utils.TypeReference;
import android.location.Location;
import android.location.LocationManager;
import android.os.Build;
import android.os.Parcel;
import android.os.Parcelable;
import android.os.ServiceSpecificException;
import android.util.Log;
import android.util.Range;
import android.util.Size;

import com.android.internal.camera.flags.Flags;

import dalvik.annotation.optimization.FastNative;
import dalvik.system.VMRuntime;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

/**
 * Implementation of camera metadata marshal/unmarshal across Binder to
 * the camera service
 */
public class CameraMetadataNative implements Parcelable {

    /**
     * Type-safe key used to read and write values in the native camera metadata
     * buffer.
     *
     * <p>A key's identity is its string name plus its full (generic-aware) type
     * reference; see {@link #equals(Object)}. The numeric native tag is resolved
     * lazily — see {@link #getTag()} and {@link #cacheTag(int)}.</p>
     */
    public static class Key<T> {
        // Lazily-resolved native tag; mTag is only meaningful once mHasTag is true.
        private boolean mHasTag;
        private int mTag;
        // Vendor tag provider id; Long.MAX_VALUE is the "unset / not a vendor key" default.
        private long mVendorId = Long.MAX_VALUE;
        private final Class<T> mType;
        private final TypeReference<T> mTypeReference;
        private final String mName;
        // Older key name to retry a lookup under when mName has no value; may be null.
        private final String mFallbackName;
        // Precomputed since keys are used heavily as HashMap keys (see sGetCommandMap).
        private final int mHash;

        /**
         * Construct a vendor-aware key.
         *
         * @hide
         */
        public Key(String name, Class<T> type, long vendorId) {
            if (name == null) {
                throw new NullPointerException("Key needs a valid name");
            } else if (type == null) {
                throw new NullPointerException("Type needs to be non-null");
            }
            mName = name;
            mFallbackName = null;
            mType = type;
            mVendorId = vendorId;
            mTypeReference = TypeReference.createSpecializedTypeReference(type);
            mHash = mName.hashCode() ^ mTypeReference.hashCode();
        }

        /**
         * Construct a key with a legacy fallback name (see {@code mFallbackName}).
         *
         * @hide
         */
        public Key(String name, String fallbackName, Class<T> type) {
            if (name == null) {
                throw new NullPointerException("Key needs a valid name");
            } else if (type == null) {
                throw new NullPointerException("Type needs to be non-null");
            }
            mName = name;
            mFallbackName = fallbackName;
            mType = type;
            mTypeReference = TypeReference.createSpecializedTypeReference(type);
            mHash = mName.hashCode() ^ mTypeReference.hashCode();
        }

        /**
         * Visible for testing only.
         *
         * <p>Use the CameraCharacteristics.Key, CaptureResult.Key, or CaptureRequest.Key
         * for application code or vendor-extended keys.</p>
         */
        public Key(String name, Class<T> type) {
            if (name == null) {
                throw new NullPointerException("Key needs a valid name");
            } else if (type == null) {
                throw new NullPointerException("Type needs to be non-null");
            }
            mName = name;
            mFallbackName = null;
            mType = type;
            mTypeReference = TypeReference.createSpecializedTypeReference(type);
            mHash = mName.hashCode() ^ mTypeReference.hashCode();
        }

        /**
         * Visible for testing only.
         *
         * <p>Use the CameraCharacteristics.Key, CaptureResult.Key, or CaptureRequest.Key
         * for application code or vendor-extended keys.</p>
         */
        @SuppressWarnings("unchecked")
        public Key(String name, TypeReference<T> typeReference) {
            if (name == null) {
                throw new NullPointerException("Key needs a valid name");
            } else if (typeReference == null) {
                throw new NullPointerException("TypeReference needs to be non-null");
            }
            mName = name;
            mFallbackName = null;
            mType = (Class<T>)typeReference.getRawType();
            mTypeReference = typeReference;
            mHash = mName.hashCode() ^ mTypeReference.hashCode();
        }

        /**
         * Return a camelCase, period separated name formatted like:
         * {@code "root.section[.subsections].name"}.
         *
         * <p>Built-in keys exposed by the Android SDK are always prefixed with {@code "android."};
         * keys that are device/platform-specific are prefixed with {@code "com."}.</p>
         *
         * <p>For example, {@code CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP} would
         * have a name of {@code "android.scaler.streamConfigurationMap"}; whereas a device
         * specific key might look like {@code "com.google.nexus.data.private"}.</p>
         *
         * @return String representation of the key name
         */
        public final String getName() {
            return mName;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public final int hashCode() {
            return mHash;
        }

        /**
         * Compare this key against other native keys, request keys, result keys, and
         * characteristics keys.
         *
         * <p>Two keys are considered equal if their name and type reference are equal.</p>
         *
         * <p>Note that the equality against non-native keys is one-way. A native key may be equal
         * to a result key; but that same result key will not be equal to a native key.</p>
         */
        @SuppressWarnings("rawtypes")
        @Override
        public final boolean equals(Object o) {
            if (this == o) {
                return true;
            }

            // Cheap rejection: unequal precomputed hashes imply unequal name/type.
            if (o == null || this.hashCode() != o.hashCode()) {
                return false;
            }

            Key<?> lhs;

            // Unwrap SDK key types down to their underlying native key before comparing.
            if (o instanceof CaptureResult.Key) {
                lhs = ((CaptureResult.Key)o).getNativeKey();
            } else if (o instanceof CaptureRequest.Key) {
                lhs = ((CaptureRequest.Key)o).getNativeKey();
            } else if (o instanceof CameraCharacteristics.Key) {
                lhs = ((CameraCharacteristics.Key)o).getNativeKey();
            } else if ((o instanceof Key)) {
                lhs = (Key<?>)o;
            } else {
                return false;
            }

            return mName.equals(lhs.mName) && mTypeReference.equals(lhs.mTypeReference);
        }

        /**
         * <p>
         * Get the tag corresponding to this key. This enables insertion into the
         * native metadata.
         * </p>
         *
         * <p>This value is looked up the first time, and cached subsequently.</p>
         *
         * <p>This function may be called without cacheTag() if this is not a vendor key.
         * If this is a vendor key, cacheTag() must be called first before getTag() can
         * be called. Otherwise, mVendorId could be default (Long.MAX_VALUE) and vendor
         * tag lookup could fail.</p>
         *
         * @return The tag numeric value corresponding to the string
         */
        @UnsupportedAppUsage
        public final int getTag() {
            if (!mHasTag) {
                mTag = CameraMetadataNative.getTag(mName, mVendorId);
                mHasTag = true;
            }
            return mTag;
        }

        /**
         * Whether this key's tag is cached.
         *
         * @hide
         */
        @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
        public final boolean hasTag() {
            return mHasTag;
        }

        /**
         * Cache this key's tag.
         *
         * @hide
         */
        @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
        public final void cacheTag(int tag) {
            mHasTag = true;
            mTag = tag;
        }

        /**
         * Get the raw class backing the type {@code T} for this key.
         *
         * <p>The distinction is only important if {@code T} is a generic, e.g.
         * {@code Range<Integer>} since the nested type will be erased.</p>
         */
        public final Class<T> getType() {
            // TODO: remove this; other places should use #getTypeReference() instead
            return mType;
        }

        /**
         * Get the vendor tag provider id.
         *
         * @hide
         */
        public final long getVendorId() {
            return mVendorId;
        }

        /**
         * Get the type reference backing the type {@code T} for this key.
         *
         * <p>The distinction is only important if {@code T} is a generic, e.g.
         * {@code Range<Integer>} since the nested type will be retained.</p>
         */
        public final TypeReference<T> getTypeReference() {
            return mTypeReference;
        }
    }

    private static final String TAG = "CameraMetadataJV";
    private static final boolean DEBUG = false;

    // this should be in sync with HAL_PIXEL_FORMAT_BLOB defined in graphics.h
    public static final int NATIVE_JPEG_FORMAT = 0x21;

    // "Process" strings mapped to/from LocationManager providers; see
    // translateLocationProviderToProcess / translateProcessToLocationProvider.
    private static final String CELLID_PROCESS = "CELLID";
    private static final String GPS_PROCESS = "GPS";
    // Number of ints of landmark data stored per face (see setFaces).
    private static final int FACE_LANDMARK_SIZE = 6;

    // Selectors for the different mandatory stream combination tables.
    private static final int MANDATORY_STREAM_CONFIGURATIONS_DEFAULT = 0;
    private static final int MANDATORY_STREAM_CONFIGURATIONS_MAX_RESOLUTION = 1;
    private static final int MANDATORY_STREAM_CONFIGURATIONS_CONCURRENT = 2;
    private static final int MANDATORY_STREAM_CONFIGURATIONS_10BIT = 3;
    private static final int MANDATORY_STREAM_CONFIGURATIONS_USE_CASE = 4;
    private static final int MANDATORY_STREAM_CONFIGURATIONS_PREVIEW_STABILIZATION = 5;
    /**
     * Translate a {@link LocationManager} provider name into the matching
     * "process" string constant, or {@code null} for unknown providers.
     */
    private static String translateLocationProviderToProcess(final String provider) {
        if (provider == null) {
            return null;
        }
        switch(provider) {
            case LocationManager.GPS_PROVIDER:
                return GPS_PROCESS;
            case LocationManager.NETWORK_PROVIDER:
                return CELLID_PROCESS;
            default:
                return null;
        }
    }

    /**
     * Inverse of {@link #translateLocationProviderToProcess}: map a "process"
     * string back to its {@link LocationManager} provider name, or {@code null}
     * if unrecognized.
     */
    private static String translateProcessToLocationProvider(final String process) {
        if (process == null) {
            return null;
        }
        switch(process) {
            case GPS_PROCESS:
                return LocationManager.GPS_PROVIDER;
            case CELLID_PROCESS:
                return LocationManager.NETWORK_PROVIDER;
            default:
                return null;
        }
    }

    /** Allocate a new, empty native metadata buffer. */
    public CameraMetadataNative() {
        super();
        mMetadataPtr = nativeAllocate();
        if (mMetadataPtr == 0) {
            throw new OutOfMemoryError("Failed to allocate native CameraMetadata");
        }
        updateNativeAllocation();
    }

    /**
     * Copy constructor - clone metadata
     */
    public CameraMetadataNative(CameraMetadataNative other) {
        super();
        mMetadataPtr = nativeAllocateCopy(other.mMetadataPtr);
        if (mMetadataPtr == 0) {
            throw new OutOfMemoryError("Failed to allocate native CameraMetadata");
        }
        updateNativeAllocation();
    }

    /**
     * Take ownership of native metadata
     */
    public CameraMetadataNative(long metadataPtr) {
        super();
        mMetadataPtr = metadataPtr;
        if (mMetadataPtr == 0) {
            throw new OutOfMemoryError("Failed to allocate native CameraMetadata");
        }
        updateNativeAllocation();
    }

    /**
     * Move the contents from {@code other} into a new camera metadata instance.</p>
     *
     * <p>After this call, {@code other} will become empty.</p>
     *
     * @param other the previous metadata instance which will get pilfered
     * @return a new metadata instance with the values from {@code other} moved into it
     */
    public static CameraMetadataNative move(CameraMetadataNative other) {
        CameraMetadataNative newObject = new CameraMetadataNative();
        newObject.swap(other);
        return newObject;
    }

    /**
     * Set all metadata values in the destination argument by using the corresponding
     * values from the source. Metadata tags present in the destination and absent
     * from the source will remain unmodified.
     *
     * @param dst Destination metadata
     * @param src Source metadata
     * @hide
     */
    public static void update(CameraMetadataNative dst, CameraMetadataNative src) {
        nativeUpdate(dst.mMetadataPtr, src.mMetadataPtr);
    }

    public static final @android.annotation.NonNull Parcelable.Creator<CameraMetadataNative> CREATOR =
            new Parcelable.Creator<CameraMetadataNative>() {
        @Override
        public CameraMetadataNative createFromParcel(Parcel in) {
            CameraMetadataNative metadata = new CameraMetadataNative();
            metadata.readFromParcel(in);
            return metadata;
        }

        @Override
        public CameraMetadataNative[] newArray(int size) {
            return new CameraMetadataNative[size];
        }
    };

    @Override
    public int describeContents() {
        return 0;
    }

    @Override
    public synchronized void writeToParcel(Parcel dest, int flags) {
        nativeWriteToParcel(dest, mMetadataPtr);
    }

    /**
     * @hide
     */
    public <T> T get(CameraCharacteristics.Key<T> key) {
        return get(key.getNativeKey());
    }

    /**
     * @hide
     */
    public <T> T get(CaptureResult.Key<T> key) {
        return get(key.getNativeKey());
    }

    /**
     * @hide
     */
    public <T> T get(CaptureRequest.Key<T> key) {
        return get(key.getNativeKey());
    }

    /**
     * Look-up a metadata field value by its key.
     *
     * @param key a non-{@code null} key instance
     * @return the field corresponding to the {@code key}, or {@code null} if no value was set
     */
    public <T> T get(Key<T> key) {
        Objects.requireNonNull(key, "key must not be null");

        // Check if key has been overridden to use a wrapper class on the java side.
        GetCommand g = sGetCommandMap.get(key);
        if (g != null) {
            return g.getValue(this, key);
        }
        return getBase(key);
    }

    public synchronized void readFromParcel(Parcel in) {
        nativeReadFromParcel(in, mMetadataPtr);
        updateNativeAllocation();
    }

    /**
     * Set the global client-side vendor tag descriptor to allow use of vendor
     * tags in camera applications.
     *
     * @throws ServiceSpecificException
     * @hide
     */
    public static void setupGlobalVendorTagDescriptor() throws ServiceSpecificException {
        int err = nativeSetupGlobalVendorTagDescriptor();
        if (err != 0) {
            throw new ServiceSpecificException(err, "Failure to set up global vendor tags");
        }
    }

    /**
     * Set the global client-side vendor tag descriptor to allow use of vendor
     * tags in camera applications.
     *
     * @return int An error code corresponding to one of the
     * {@link ICameraService} error constants, or 0 on success.
     */
    private static native int nativeSetupGlobalVendorTagDescriptor();

    /**
     * Set a camera metadata field to a value. The field definitions can be
     * found in {@link CameraCharacteristics}, {@link CaptureResult}, and
     * {@link CaptureRequest}.
     *
     * @param key The metadata field to write.
     * @param value The value to set the field to, which must be of a matching
     * type to the key.
     */
    public <T> void set(Key<T> key, T value) {
        // Mirror of get(): overridden keys route through a Java-side SetCommand.
        SetCommand s = sSetCommandMap.get(key);
        if (s != null) {
            s.setValue(this, value);
            return;
        }

        setBase(key, value);
    }

    public <T> void set(CaptureRequest.Key<T> key, T value) {
        set(key.getNativeKey(), value);
    }

    public <T> void set(CaptureResult.Key<T> key, T value) {
        set(key.getNativeKey(), value);
    }

    public <T> void set(CameraCharacteristics.Key<T> key, T value) {
        set(key.getNativeKey(), value);
    }

    // Keep up-to-date with camera_metadata.h
    /**
     * @hide
     */
    public static final int TYPE_BYTE = 0;
    /**
     * @hide
     */
    public static final int TYPE_INT32 = 1;
    /**
     * @hide
     */
    public static final int TYPE_FLOAT = 2;
    /**
     * @hide
     */
    public static final int TYPE_INT64 = 3;
    /**
     * @hide
     */
    public static final int TYPE_DOUBLE = 4;
    /**
     * @hide
     */
    public static final int TYPE_RATIONAL = 5;
    /**
     * @hide
     */
    public static final int NUM_TYPES = 6;

    private void close() {
        // Delete native pointer, but does not clear it
        nativeClose(mMetadataPtr);
        mMetadataPtr = 0;

        if (mBufferSize > 0) {
            VMRuntime.getRuntime().registerNativeFree(mBufferSize);
        }
        mBufferSize = 0;
    }

    private <T> T getBase(CameraCharacteristics.Key<T> key) {
        return getBase(key.getNativeKey());
    }

    private <T> T getBase(CaptureResult.Key<T> key) {
        return getBase(key.getNativeKey());
    }

    private <T> T getBase(CaptureRequest.Key<T> key) {
        return getBase(key.getNativeKey());
    }

    /**
     * Read the raw bytes for {@code key} from the native buffer and unmarshal
     * them into a {@code T}.
     *
     * @return the unmarshaled value, or {@code null} when neither the key's name
     *         nor its fallback name has a value in this metadata instance
     */
    private <T> T getBase(Key<T> key) {
        int tag, nativeType;
        byte[] values = null;
        synchronized (this) {
            if (key.hasTag()) {
                tag = key.getTag();
            } else {
                tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.getName());
                key.cacheTag(tag);
            }
            values = readValues(tag);
            if (values == null) {
                // If the key returns null, use the fallback key if exists.
                // This is to support old key names for the newly published keys.
                if (key.mFallbackName == null) {
                    return null;
                }
                tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.mFallbackName);
                values = readValues(tag);
                if (values == null) {
                    return null;
                }
            }

            nativeType = nativeGetTypeFromTagLocal(mMetadataPtr, tag);
        }
        // This block of code doesn't need to be synchronized since we aren't writing or reading
        // from the metadata buffer for this instance of CameraMetadataNative.
        Marshaler<T> marshaler = getMarshalerForKey(key, nativeType);
        ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder());
        return marshaler.unmarshal(buffer);
    }

    // Use Command pattern here to avoid lots of expensive if/equals checks in get for overridden
    // metadata.
    // Table of keys whose get() is overridden on the Java side (wrapper classes,
    // format translation, etc.); populated once in the static initializer below.
    private static final HashMap<Key<?>, GetCommand> sGetCommandMap =
            new HashMap<Key<?>, GetCommand>();
    static {
        sGetCommandMap.put(
                CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getAvailableFormats();
                    }
                });
        sGetCommandMap.put(
                CaptureResult.STATISTICS_FACES.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getFaces();
                    }
                });
        sGetCommandMap.put(
                CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getFaceRectangles();
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getStreamConfigurationMap();
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getStreamConfigurationMapMaximumResolution();
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.SCALER_MANDATORY_STREAM_COMBINATIONS.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMandatoryStreamCombinations();
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.SCALER_MANDATORY_CONCURRENT_STREAM_COMBINATIONS.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMandatoryConcurrentStreamCombinations();
                    }
                });

        sGetCommandMap.put(
                CameraCharacteristics.SCALER_MANDATORY_TEN_BIT_OUTPUT_STREAM_COMBINATIONS.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMandatory10BitStreamCombinations();
                    }
                });

        sGetCommandMap.put(
                CameraCharacteristics.SCALER_MANDATORY_MAXIMUM_RESOLUTION_STREAM_COMBINATIONS.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMandatoryMaximumResolutionStreamCombinations();
                    }
                });

        sGetCommandMap.put(
                CameraCharacteristics.SCALER_MANDATORY_USE_CASE_STREAM_COMBINATIONS.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMandatoryUseCaseStreamCombinations();
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.SCALER_MANDATORY_PREVIEW_STABILIZATION_OUTPUT_STREAM_COMBINATIONS.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMandatoryPreviewStabilizationStreamCombinations();
                    }
                });

        sGetCommandMap.put(
                CameraCharacteristics.CONTROL_MAX_REGIONS_AE.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMaxRegions(key);
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.CONTROL_MAX_REGIONS_AWB.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMaxRegions(key);
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.CONTROL_MAX_REGIONS_AF.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMaxRegions(key);
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMaxNumOutputs(key);
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMaxNumOutputs(key);
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMaxNumOutputs(key);
                    }
                });
        sGetCommandMap.put(
                CaptureRequest.TONEMAP_CURVE.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getTonemapCurve();
                    }
                });
        sGetCommandMap.put(
                CaptureResult.JPEG_GPS_LOCATION.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getGpsLocation();
                    }
                });
        sGetCommandMap.put(
                CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getLensShadingMap();
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.INFO_DEVICE_STATE_SENSOR_ORIENTATION_MAP.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getDeviceStateOrientationMap();
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getDynamicRangeProfiles();
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.REQUEST_AVAILABLE_COLOR_SPACE_PROFILES.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getColorSpaceProfiles();
                    }
                });
        sGetCommandMap.put(
                CaptureResult.STATISTICS_OIS_SAMPLES.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getOisSamples();
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getExtendedSceneModeCapabilities();
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMultiResolutionStreamConfigurationMap();
                    }
                });
        sGetCommandMap.put(
                CaptureResult.STATISTICS_LENS_INTRINSICS_SAMPLES.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getLensIntrinsicSamples();
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.SHARED_SESSION_CONFIGURATION.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getSharedSessionConfiguration();
                    }
                });
    }

    /**
     * Read SCALER_AVAILABLE_FORMATS, rewriting the native JPEG (BLOB) format
     * code to the public {@link ImageFormat#JPEG} constant.
     */
    private int[] getAvailableFormats() {
        int[] availableFormats = getBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS);
        if (availableFormats != null) {
            for (int i = 0; i < availableFormats.length; i++) {
                // JPEG has different value between native and managed side, need override.
                if (availableFormats[i] == NATIVE_JPEG_FORMAT) {
                    availableFormats[i] = ImageFormat.JPEG;
                }
            }
        }

        return availableFormats;
    }

    private boolean setFaces(Face[] faces) {
        if (faces == null) {
            return false;
        }

        int numFaces = faces.length;

        // Detect if all faces are SIMPLE or not; count # of valid faces
        boolean fullMode = true;
        for (Face face : faces) {
            if (face == null) {
                numFaces--;
                Log.w(TAG, "setFaces - null face detected, skipping");
                continue;
            }

            if (face.getId() == Face.ID_UNSUPPORTED) {
                fullMode = false;
            }
        }

        Rect[] faceRectangles = new Rect[numFaces];
        byte[] faceScores = new byte[numFaces];
        int[] faceIds = null;
        int[] faceLandmarks = null;

        // Ids and landmarks are only written when every face supports them.
        if (fullMode) {
            faceIds = new int[numFaces];
            faceLandmarks = new int[numFaces * FACE_LANDMARK_SIZE];
        }

        int i = 0;
        for (Face face : faces) {
            if (face == null) {
                continue;
            }

            faceRectangles[i] = face.getBounds();
            faceScores[i] = (byte)face.getScore();

            if (fullMode) {
                faceIds[i] = face.getId();

                int j
= 0; 951 952 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().x; 953 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().y; 954 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().x; 955 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().y; 956 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().x; 957 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().y; 958 } 959 960 i++; 961 } 962 963 set(CaptureResult.STATISTICS_FACE_RECTANGLES, faceRectangles); 964 set(CaptureResult.STATISTICS_FACE_IDS, faceIds); 965 set(CaptureResult.STATISTICS_FACE_LANDMARKS, faceLandmarks); 966 set(CaptureResult.STATISTICS_FACE_SCORES, faceScores); 967 968 return true; 969 } 970 getFaces()971 private Face[] getFaces() { 972 Integer faceDetectMode = get(CaptureResult.STATISTICS_FACE_DETECT_MODE); 973 byte[] faceScores = get(CaptureResult.STATISTICS_FACE_SCORES); 974 Rect[] faceRectangles = get(CaptureResult.STATISTICS_FACE_RECTANGLES); 975 int[] faceIds = get(CaptureResult.STATISTICS_FACE_IDS); 976 int[] faceLandmarks = get(CaptureResult.STATISTICS_FACE_LANDMARKS); 977 978 if (areValuesAllNull(faceDetectMode, faceScores, faceRectangles, faceIds, faceLandmarks)) { 979 return null; 980 } 981 982 if (faceDetectMode == null) { 983 Log.w(TAG, "Face detect mode metadata is null, assuming the mode is SIMPLE"); 984 faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE; 985 } else if (faceDetectMode > CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) { 986 // Face detect mode is larger than FULL, assuming the mode is FULL 987 faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL; 988 } else { 989 if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) { 990 return new Face[0]; 991 } 992 if (faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE && 993 faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) { 994 Log.w(TAG, 
"Unknown face detect mode: " + faceDetectMode); 995 return new Face[0]; 996 } 997 } 998 999 // Face scores and rectangles are required by SIMPLE and FULL mode. 1000 if (faceScores == null || faceRectangles == null) { 1001 Log.w(TAG, "Expect face scores and rectangles to be non-null"); 1002 return new Face[0]; 1003 } else if (faceScores.length != faceRectangles.length) { 1004 Log.w(TAG, String.format("Face score size(%d) doesn match face rectangle size(%d)!", 1005 faceScores.length, faceRectangles.length)); 1006 } 1007 1008 // To be safe, make number of faces is the minimal of all face info metadata length. 1009 int numFaces = Math.min(faceScores.length, faceRectangles.length); 1010 // Face id and landmarks are only required by FULL mode. 1011 if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) { 1012 if (faceIds == null || faceLandmarks == null) { 1013 Log.w(TAG, "Expect face ids and landmarks to be non-null for FULL mode," + 1014 "fallback to SIMPLE mode"); 1015 faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE; 1016 } else { 1017 if (faceIds.length != numFaces || 1018 faceLandmarks.length != numFaces * FACE_LANDMARK_SIZE) { 1019 Log.w(TAG, String.format("Face id size(%d), or face landmark size(%d) don't" + 1020 "match face number(%d)!", 1021 faceIds.length, faceLandmarks.length * FACE_LANDMARK_SIZE, numFaces)); 1022 } 1023 // To be safe, make number of faces is the minimal of all face info metadata length. 
1024 numFaces = Math.min(numFaces, faceIds.length); 1025 numFaces = Math.min(numFaces, faceLandmarks.length / FACE_LANDMARK_SIZE); 1026 } 1027 } 1028 1029 ArrayList<Face> faceList = new ArrayList<Face>(); 1030 if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) { 1031 for (int i = 0; i < numFaces; i++) { 1032 if (faceScores[i] <= Face.SCORE_MAX && 1033 faceScores[i] >= Face.SCORE_MIN) { 1034 faceList.add(new Face(faceRectangles[i], faceScores[i])); 1035 } 1036 } 1037 } else { 1038 // CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL 1039 for (int i = 0; i < numFaces; i++) { 1040 if (faceScores[i] <= Face.SCORE_MAX && 1041 faceScores[i] >= Face.SCORE_MIN && 1042 faceIds[i] >= 0) { 1043 Point leftEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE], 1044 faceLandmarks[i*FACE_LANDMARK_SIZE+1]); 1045 Point rightEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+2], 1046 faceLandmarks[i*FACE_LANDMARK_SIZE+3]); 1047 Point mouth = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+4], 1048 faceLandmarks[i*FACE_LANDMARK_SIZE+5]); 1049 Face face = new Face(faceRectangles[i], faceScores[i], faceIds[i], 1050 leftEye, rightEye, mouth); 1051 faceList.add(face); 1052 } 1053 } 1054 } 1055 Face[] faces = new Face[faceList.size()]; 1056 faceList.toArray(faces); 1057 return faces; 1058 } 1059 1060 // Face rectangles are defined as (left, top, right, bottom) instead of 1061 // (left, top, width, height) at the native level, so the normal Rect 1062 // conversion that does (l, t, w, h) -> (l, t, r, b) is unnecessary. Undo 1063 // that conversion here for just the faces. 
getFaceRectangles()1064 private Rect[] getFaceRectangles() { 1065 Rect[] faceRectangles = getBase(CaptureResult.STATISTICS_FACE_RECTANGLES); 1066 if (faceRectangles == null) return null; 1067 1068 Rect[] fixedFaceRectangles = new Rect[faceRectangles.length]; 1069 for (int i = 0; i < faceRectangles.length; i++) { 1070 fixedFaceRectangles[i] = new Rect( 1071 faceRectangles[i].left, 1072 faceRectangles[i].top, 1073 faceRectangles[i].right - faceRectangles[i].left, 1074 faceRectangles[i].bottom - faceRectangles[i].top); 1075 } 1076 return fixedFaceRectangles; 1077 } 1078 setLensShadingMap(LensShadingMap lensShadingMap)1079 private boolean setLensShadingMap(LensShadingMap lensShadingMap) { 1080 if (lensShadingMap == null) { 1081 return false; 1082 } 1083 float[] lsmArray = new float[lensShadingMap.getGainFactorCount()]; 1084 lensShadingMap.copyGainFactors(lsmArray, 0); 1085 setBase(CaptureResult.STATISTICS_LENS_SHADING_MAP, lsmArray); 1086 1087 Size s = new Size(lensShadingMap.getRowCount(), lensShadingMap.getColumnCount()); 1088 setBase(CameraCharacteristics.LENS_INFO_SHADING_MAP_SIZE, s); 1089 return true; 1090 } 1091 getLensShadingMap()1092 private LensShadingMap getLensShadingMap() { 1093 float[] lsmArray = getBase(CaptureResult.STATISTICS_LENS_SHADING_MAP); 1094 Size s = get(CameraCharacteristics.LENS_INFO_SHADING_MAP_SIZE); 1095 1096 // Do not warn if lsmArray is null while s is not. This is valid. 1097 if (lsmArray == null) { 1098 return null; 1099 } 1100 1101 if (s == null) { 1102 Log.w(TAG, "getLensShadingMap - Lens shading map size was null."); 1103 return null; 1104 } 1105 1106 LensShadingMap map = new LensShadingMap(lsmArray, s.getHeight(), s.getWidth()); 1107 return map; 1108 } 1109 getDeviceStateOrientationMap()1110 private DeviceStateSensorOrientationMap getDeviceStateOrientationMap() { 1111 long[] mapArray = getBase(CameraCharacteristics.INFO_DEVICE_STATE_ORIENTATIONS); 1112 1113 // Do not warn if map is null while s is not. This is valid. 
1114 if (mapArray == null) { 1115 return null; 1116 } 1117 1118 DeviceStateSensorOrientationMap map = new DeviceStateSensorOrientationMap(mapArray); 1119 return map; 1120 } 1121 getDynamicRangeProfiles()1122 private DynamicRangeProfiles getDynamicRangeProfiles() { 1123 long[] profileArray = getBase( 1124 CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP); 1125 1126 if (profileArray == null) { 1127 return null; 1128 } 1129 1130 return new DynamicRangeProfiles(profileArray); 1131 } 1132 getColorSpaceProfiles()1133 private ColorSpaceProfiles getColorSpaceProfiles() { 1134 long[] profileArray = getBase( 1135 CameraCharacteristics.REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP); 1136 1137 if (profileArray == null) { 1138 return null; 1139 } 1140 1141 return new ColorSpaceProfiles(profileArray); 1142 } 1143 getGpsLocation()1144 private Location getGpsLocation() { 1145 String processingMethod = get(CaptureResult.JPEG_GPS_PROCESSING_METHOD); 1146 double[] coords = get(CaptureResult.JPEG_GPS_COORDINATES); 1147 Long timeStamp = get(CaptureResult.JPEG_GPS_TIMESTAMP); 1148 1149 if (areValuesAllNull(processingMethod, coords, timeStamp)) { 1150 return null; 1151 } 1152 1153 Location l = new Location(translateProcessToLocationProvider(processingMethod)); 1154 if (timeStamp != null) { 1155 // Location expects timestamp in [ms.] 1156 l.setTime(timeStamp * 1000); 1157 } else { 1158 Log.w(TAG, "getGpsLocation - No timestamp for GPS location."); 1159 } 1160 1161 if (coords != null) { 1162 l.setLatitude(coords[0]); 1163 l.setLongitude(coords[1]); 1164 l.setAltitude(coords[2]); 1165 } else { 1166 Log.w(TAG, "getGpsLocation - No coordinates for GPS location"); 1167 } 1168 1169 return l; 1170 } 1171 setGpsLocation(Location l)1172 private boolean setGpsLocation(Location l) { 1173 if (l == null) { 1174 // If Location value being set is null, remove corresponding keys. 
1175 // This is safe because api1/client2/CameraParameters.cpp already erases 1176 // the keys for JPEG_GPS_LOCATION for certain cases. 1177 setBase(CaptureRequest.JPEG_GPS_TIMESTAMP, null); 1178 setBase(CaptureRequest.JPEG_GPS_COORDINATES, null); 1179 setBase(CaptureRequest.JPEG_GPS_PROCESSING_METHOD, null); 1180 return false; 1181 } 1182 1183 double[] coords = { l.getLatitude(), l.getLongitude(), l.getAltitude() }; 1184 String processMethod = translateLocationProviderToProcess(l.getProvider()); 1185 //JPEG_GPS_TIMESTAMP expects sec. instead of msec. 1186 long timestamp = l.getTime() / 1000; 1187 1188 set(CaptureRequest.JPEG_GPS_TIMESTAMP, timestamp); 1189 set(CaptureRequest.JPEG_GPS_COORDINATES, coords); 1190 1191 if (processMethod == null) { 1192 Log.w(TAG, "setGpsLocation - No process method, Location is not from a GPS or NETWORK" + 1193 "provider"); 1194 } else { 1195 setBase(CaptureRequest.JPEG_GPS_PROCESSING_METHOD, processMethod); 1196 } 1197 return true; 1198 } 1199 parseRecommendedConfigurations(RecommendedStreamConfiguration[] configurations, StreamConfigurationMap fullMap, boolean isDepth, ArrayList<ArrayList<StreamConfiguration>> streamConfigList, ArrayList<ArrayList<StreamConfigurationDuration>> streamDurationList, ArrayList<ArrayList<StreamConfigurationDuration>> streamStallList, boolean[] supportsPrivate)1200 private void parseRecommendedConfigurations(RecommendedStreamConfiguration[] configurations, 1201 StreamConfigurationMap fullMap, boolean isDepth, 1202 ArrayList<ArrayList<StreamConfiguration>> /*out*/streamConfigList, 1203 ArrayList<ArrayList<StreamConfigurationDuration>> /*out*/streamDurationList, 1204 ArrayList<ArrayList<StreamConfigurationDuration>> /*out*/streamStallList, 1205 boolean[] /*out*/supportsPrivate) { 1206 1207 streamConfigList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 1208 streamDurationList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 1209 
streamStallList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 1210 for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) { 1211 streamConfigList.add(new ArrayList<StreamConfiguration> ()); 1212 streamDurationList.add(new ArrayList<StreamConfigurationDuration> ()); 1213 streamStallList.add(new ArrayList<StreamConfigurationDuration> ()); 1214 } 1215 1216 for (RecommendedStreamConfiguration c : configurations) { 1217 int width = c.getWidth(); 1218 int height = c.getHeight(); 1219 int internalFormat = c.getFormat(); 1220 int publicFormat = 1221 (isDepth) ? StreamConfigurationMap.depthFormatToPublic(internalFormat) : 1222 StreamConfigurationMap.imageFormatToPublic(internalFormat); 1223 Size sz = new Size(width, height); 1224 int usecaseBitmap = c.getUsecaseBitmap(); 1225 1226 if (!c.isInput()) { 1227 StreamConfigurationDuration minDurationConfiguration = null; 1228 StreamConfigurationDuration stallDurationConfiguration = null; 1229 1230 StreamConfiguration streamConfiguration = new StreamConfiguration(internalFormat, 1231 width, height, /*input*/ false); 1232 1233 long minFrameDuration = fullMap.getOutputMinFrameDuration(publicFormat, sz); 1234 if (minFrameDuration > 0) { 1235 minDurationConfiguration = new StreamConfigurationDuration(internalFormat, 1236 width, height, minFrameDuration); 1237 } 1238 1239 long stallDuration = fullMap.getOutputStallDuration(publicFormat, sz); 1240 if (stallDuration > 0) { 1241 stallDurationConfiguration = new StreamConfigurationDuration(internalFormat, 1242 width, height, stallDuration); 1243 } 1244 1245 for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) { 1246 if ((usecaseBitmap & (1 << i)) != 0) { 1247 ArrayList<StreamConfiguration> sc = streamConfigList.get(i); 1248 sc.add(streamConfiguration); 1249 1250 if (minFrameDuration > 0) { 1251 ArrayList<StreamConfigurationDuration> scd = streamDurationList.get(i); 1252 scd.add(minDurationConfiguration); 1253 } 1254 
1255 if (stallDuration > 0) { 1256 ArrayList<StreamConfigurationDuration> scs = streamStallList.get(i); 1257 scs.add(stallDurationConfiguration); 1258 } 1259 1260 if ((supportsPrivate != null) && !supportsPrivate[i] && 1261 (publicFormat == ImageFormat.PRIVATE)) { 1262 supportsPrivate[i] = true; 1263 } 1264 } 1265 } 1266 } else { 1267 if (usecaseBitmap != (1 << RecommendedStreamConfigurationMap.USECASE_ZSL)) { 1268 throw new IllegalArgumentException("Recommended input stream configurations " + 1269 "should only be advertised in the ZSL use case!"); 1270 } 1271 1272 ArrayList<StreamConfiguration> sc = streamConfigList.get( 1273 RecommendedStreamConfigurationMap.USECASE_ZSL); 1274 sc.add(new StreamConfiguration(internalFormat, 1275 width, height, /*input*/ true)); 1276 } 1277 } 1278 } 1279 1280 private class StreamConfigurationData { 1281 StreamConfiguration [] streamConfigurationArray = null; 1282 StreamConfigurationDuration [] minDurationArray = null; 1283 StreamConfigurationDuration [] stallDurationArray = null; 1284 } 1285 initializeStreamConfigurationData(ArrayList<StreamConfiguration> sc, ArrayList<StreamConfigurationDuration> scd, ArrayList<StreamConfigurationDuration> scs, StreamConfigurationData scData)1286 public void initializeStreamConfigurationData(ArrayList<StreamConfiguration> sc, 1287 ArrayList<StreamConfigurationDuration> scd, ArrayList<StreamConfigurationDuration> scs, 1288 StreamConfigurationData /*out*/scData) { 1289 if ((scData == null) || (sc == null)) { 1290 return; 1291 } 1292 1293 scData.streamConfigurationArray = new StreamConfiguration[sc.size()]; 1294 scData.streamConfigurationArray = sc.toArray(scData.streamConfigurationArray); 1295 1296 if ((scd != null) && !scd.isEmpty()) { 1297 scData.minDurationArray = new StreamConfigurationDuration[scd.size()]; 1298 scData.minDurationArray = scd.toArray(scData.minDurationArray); 1299 } else { 1300 scData.minDurationArray = new StreamConfigurationDuration[0]; 1301 } 1302 1303 if ((scs != null) && 
    /**
     * Retrieve the list of recommended stream configurations.
     *
     * @return A list of recommended stream configuration maps for each common use case or null
     *         in case the recommended stream configurations are invalid or incomplete.
     * @hide
     */
    public ArrayList<RecommendedStreamConfigurationMap> getRecommendedStreamConfigurations() {
        RecommendedStreamConfiguration[] configurations = getBase(
                CameraCharacteristics.SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS);
        RecommendedStreamConfiguration[] depthConfigurations = getBase(
                CameraCharacteristics.DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS);
        // No recommended configurations of either kind: nothing to build.
        if ((configurations == null) && (depthConfigurations == null)) {
            return null;
        }

        StreamConfigurationMap fullMap = getStreamConfigurationMap();
        ArrayList<RecommendedStreamConfigurationMap> recommendedConfigurations =
                new ArrayList<RecommendedStreamConfigurationMap> ();

        // Per-use-case buckets filled in by parseRecommendedConfigurations().
        ArrayList<ArrayList<StreamConfiguration>> streamConfigList =
                new ArrayList<ArrayList<StreamConfiguration>>();
        ArrayList<ArrayList<StreamConfigurationDuration>> streamDurationList =
                new ArrayList<ArrayList<StreamConfigurationDuration>>();
        ArrayList<ArrayList<StreamConfigurationDuration>> streamStallList =
                new ArrayList<ArrayList<StreamConfigurationDuration>>();
        boolean[] supportsPrivate =
                new boolean[RecommendedStreamConfigurationMap.MAX_USECASE_COUNT];
        try {
            if (configurations != null) {
                parseRecommendedConfigurations(configurations, fullMap, /*isDepth*/ false,
                        streamConfigList, streamDurationList, streamStallList, supportsPrivate);
            }
        } catch (IllegalArgumentException e) {
            // Malformed HAL data (e.g. input stream outside ZSL) invalidates the whole list.
            Log.e(TAG, "Failed parsing the recommended stream configurations!");
            return null;
        }

        // Same bucketing for the depth configurations; PRIVATE-format tracking
        // does not apply to depth, hence supportsPrivate is null.
        ArrayList<ArrayList<StreamConfiguration>> depthStreamConfigList =
                new ArrayList<ArrayList<StreamConfiguration>>();
        ArrayList<ArrayList<StreamConfigurationDuration>> depthStreamDurationList =
                new ArrayList<ArrayList<StreamConfigurationDuration>>();
        ArrayList<ArrayList<StreamConfigurationDuration>> depthStreamStallList =
                new ArrayList<ArrayList<StreamConfigurationDuration>>();
        if (depthConfigurations != null) {
            try {
                parseRecommendedConfigurations(depthConfigurations, fullMap, /*isDepth*/ true,
                        depthStreamConfigList, depthStreamDurationList, depthStreamStallList,
                        /*supportsPrivate*/ null);
            } catch (IllegalArgumentException e) {
                Log.e(TAG, "Failed parsing the recommended depth stream configurations!");
                return null;
            }
        }

        ReprocessFormatsMap inputOutputFormatsMap = getBase(
                CameraCharacteristics.SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP);
        HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase(
                CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS);
        boolean listHighResolution = isBurstSupported();
        recommendedConfigurations.ensureCapacity(
                RecommendedStreamConfigurationMap.MAX_USECASE_COUNT);
        // Build one map per use case; null placeholders keep the list indexed by use case.
        for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) {
            StreamConfigurationData scData = new StreamConfigurationData();
            if (configurations != null) {
                initializeStreamConfigurationData(streamConfigList.get(i),
                        streamDurationList.get(i), streamStallList.get(i), scData);
            }

            StreamConfigurationData depthScData = new StreamConfigurationData();
            if (depthConfigurations != null) {
                initializeStreamConfigurationData(depthStreamConfigList.get(i),
                        depthStreamDurationList.get(i), depthStreamStallList.get(i), depthScData);
            }

            // Nothing recommended for this use case at all: keep a null slot.
            if ((scData.streamConfigurationArray == null ||
                    scData.streamConfigurationArray.length == 0) &&
                    (depthScData.streamConfigurationArray == null ||
                    depthScData.streamConfigurationArray.length == 0)) {
                recommendedConfigurations.add(null);
                continue;
            }

            // Dynamic depth streams involve a lot of SW processing and currently cannot be
            // recommended.
            StreamConfigurationMap map = null;
            switch (i) {
                case RecommendedStreamConfigurationMap.USECASE_PREVIEW:
                case RecommendedStreamConfigurationMap.USECASE_RAW:
                case RecommendedStreamConfigurationMap.USECASE_LOW_LATENCY_SNAPSHOT:
                case RecommendedStreamConfigurationMap.USECASE_VIDEO_SNAPSHOT:
                    // Regular output-only use cases: no depth, reprocess, or
                    // high-speed entries.
                    map = new StreamConfigurationMap(scData.streamConfigurationArray,
                            scData.minDurationArray, scData.stallDurationArray,
                            /*depthconfiguration*/ null, /*depthminduration*/ null,
                            /*depthstallduration*/ null,
                            /*dynamicDepthConfigurations*/ null,
                            /*dynamicDepthMinFrameDurations*/ null,
                            /*dynamicDepthStallDurations*/ null,
                            /*heicconfiguration*/ null,
                            /*heicminduration*/ null,
                            /*heicstallduration*/ null,
                            /*jpegRconfiguration*/ null,
                            /*jpegRminduration*/ null,
                            /*jpegRstallduration*/ null,
                            /*heicUltraHDRconfiguration*/ null,
                            /*heicUltraHDRminduration*/ null,
                            /*heicUltraHDRstallduration*/ null,
                            /*highspeedvideoconfigurations*/ null,
                            /*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]);
                    break;
                case RecommendedStreamConfigurationMap.USECASE_RECORD:
                    // Recording additionally carries the high speed video configurations.
                    map = new StreamConfigurationMap(scData.streamConfigurationArray,
                            scData.minDurationArray, scData.stallDurationArray,
                            /*depthconfiguration*/ null, /*depthminduration*/ null,
                            /*depthstallduration*/ null,
                            /*dynamicDepthConfigurations*/ null,
                            /*dynamicDepthMinFrameDurations*/ null,
                            /*dynamicDepthStallDurations*/ null,
                            /*heicconfiguration*/ null,
                            /*heicminduration*/ null,
                            /*heicstallduration*/ null,
                            /*jpegRconfiguration*/ null,
                            /*jpegRminduration*/ null,
                            /*jpegRstallduration*/ null,
                            /*heicUltraHDRconfiguration*/ null,
                            /*heicUltraHDRminduration*/ null,
                            /*heicUltraHDRstallduration*/ null,
                            highSpeedVideoConfigurations,
                            /*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]);
                    break;
                case RecommendedStreamConfigurationMap.USECASE_ZSL:
                    // ZSL is the only use case with depth entries and the
                    // input/output reprocess formats map.
                    map = new StreamConfigurationMap(scData.streamConfigurationArray,
                            scData.minDurationArray, scData.stallDurationArray,
                            depthScData.streamConfigurationArray, depthScData.minDurationArray,
                            depthScData.stallDurationArray,
                            /*dynamicDepthConfigurations*/ null,
                            /*dynamicDepthMinFrameDurations*/ null,
                            /*dynamicDepthStallDurations*/ null,
                            /*heicconfiguration*/ null,
                            /*heicminduration*/ null,
                            /*heicstallduration*/ null,
                            /*jpegRconfiguration*/ null,
                            /*jpegRminduration*/ null,
                            /*jpegRstallduration*/ null,
                            /*heicUltraHDRconfiguration*/ null,
                            /*heicUltraHDRminduration*/ null,
                            /*heicUltraHDRstallduration*/ null,
                            /*highSpeedVideoConfigurations*/ null,
                            inputOutputFormatsMap, listHighResolution, supportsPrivate[i]);
                    break;
                default:
                    map = new StreamConfigurationMap(scData.streamConfigurationArray,
                            scData.minDurationArray, scData.stallDurationArray,
                            depthScData.streamConfigurationArray, depthScData.minDurationArray,
                            depthScData.stallDurationArray,
                            /*dynamicDepthConfigurations*/ null,
                            /*dynamicDepthMinFrameDurations*/ null,
                            /*dynamicDepthStallDurations*/ null,
                            /*heicconfiguration*/ null,
                            /*heicminduration*/ null,
                            /*heicstallduration*/ null,
                            /*jpegRconfiguration*/ null,
                            /*jpegRminduration*/ null,
                            /*jpegRstallduration*/ null,
                            /*heicUltraHDRconfiguration*/ null,
                            /*heicUltraHDRminduration*/ null,
                            /*heicUltraHDRstallduration*/ null,
                            /*highSpeedVideoConfigurations*/ null,
                            /*inputOutputFormatsMap*/ null, listHighResolution, supportsPrivate[i]);
            }

            recommendedConfigurations.add(new RecommendedStreamConfigurationMap(map, /*usecase*/i,
                    supportsPrivate[i]));
        }

        return recommendedConfigurations;
    }

    /**
     * Returns whether the given capability is advertised in
     * REQUEST_AVAILABLE_CAPABILITIES.
     */
    // NOTE(review): will throw NPE if the capabilities key is absent; sibling
    // helpers below null-check their getBase() results — confirm the key is
    // guaranteed present before relying on this.
    private boolean isCapabilitySupported(int capabilityRequested) {
        boolean ret = false;

        int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
        for (int capability : capabilities) {
            if (capabilityRequested == capability) {
                ret = true;
                break;
            }
        }

        return ret;
    }

    /**
     * Returns whether this camera advertises the ULTRA_HIGH_RESOLUTION_SENSOR
     * capability.
     *
     * @hide
     */
    public boolean isUltraHighResolutionSensor() {
        return isCapabilitySupported(
                CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR);

    }

    /** Returns whether this camera advertises the BURST_CAPTURE capability. */
    private boolean isBurstSupported() {
        return isCapabilitySupported(
                CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
    }

    /**
     * Returns whether PREVIEW_STABILIZATION appears in the available video
     * stabilization modes.
     */
    private boolean isPreviewStabilizationSupported() {
        boolean ret = false;

        int[] videoStabilizationModes =
                getBase(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
        if (videoStabilizationModes == null) {
            return false;
        }
        for (int mode : videoStabilizationModes) {
            if (mode == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION) {
                ret = true;
                break;
            }
        }

        return ret;
    }

    /**
     * Returns whether CROPPED_RAW appears in the available stream use cases.
     */
    private boolean isCroppedRawSupported() {
        boolean ret = false;

        long[] streamUseCases =
                getBase(CameraCharacteristics.SCALER_AVAILABLE_STREAM_USE_CASES);
        if (streamUseCases == null) {
            return false;
        }
        for (long useCase : streamUseCases) {
            if (useCase == CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW) {
                return true;
            }
        }

        return ret;
    }
CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW) { 1544 return true; 1545 } 1546 } 1547 1548 return ret; 1549 } 1550 getMandatoryStreamCombinationsHelper( int mandatoryStreamsType)1551 private MandatoryStreamCombination[] getMandatoryStreamCombinationsHelper( 1552 int mandatoryStreamsType) { 1553 int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 1554 ArrayList<Integer> caps = new ArrayList<Integer>(); 1555 caps.ensureCapacity(capabilities.length); 1556 for (int c : capabilities) { 1557 caps.add(new Integer(c)); 1558 } 1559 int hwLevel = getBase(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); 1560 MandatoryStreamCombination.Builder build = new MandatoryStreamCombination.Builder( 1561 mCameraId, hwLevel, mDisplaySize, caps, getStreamConfigurationMap(), 1562 getStreamConfigurationMapMaximumResolution(), isPreviewStabilizationSupported(), 1563 isCroppedRawSupported()); 1564 1565 List<MandatoryStreamCombination> combs = null; 1566 switch (mandatoryStreamsType) { 1567 case MANDATORY_STREAM_CONFIGURATIONS_CONCURRENT: 1568 combs = build.getAvailableMandatoryConcurrentStreamCombinations(); 1569 break; 1570 case MANDATORY_STREAM_CONFIGURATIONS_MAX_RESOLUTION: 1571 combs = build.getAvailableMandatoryMaximumResolutionStreamCombinations(); 1572 break; 1573 case MANDATORY_STREAM_CONFIGURATIONS_10BIT: 1574 combs = build.getAvailableMandatory10BitStreamCombinations(); 1575 break; 1576 case MANDATORY_STREAM_CONFIGURATIONS_USE_CASE: 1577 combs = build.getAvailableMandatoryStreamUseCaseCombinations(); 1578 break; 1579 case MANDATORY_STREAM_CONFIGURATIONS_PREVIEW_STABILIZATION: 1580 combs = build.getAvailableMandatoryPreviewStabilizedStreamCombinations(); 1581 break; 1582 default: 1583 combs = build.getAvailableMandatoryStreamCombinations(); 1584 } 1585 if ((combs != null) && (!combs.isEmpty())) { 1586 MandatoryStreamCombination[] combArray = new MandatoryStreamCombination[combs.size()]; 1587 combArray = combs.toArray(combArray); 
1588 return combArray; 1589 } 1590 return null; 1591 } 1592 getMandatory10BitStreamCombinations()1593 private MandatoryStreamCombination[] getMandatory10BitStreamCombinations() { 1594 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_10BIT); 1595 } 1596 getMandatoryConcurrentStreamCombinations()1597 private MandatoryStreamCombination[] getMandatoryConcurrentStreamCombinations() { 1598 if (!mHasMandatoryConcurrentStreams) { 1599 return null; 1600 } 1601 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_CONCURRENT); 1602 } 1603 getMandatoryMaximumResolutionStreamCombinations()1604 private MandatoryStreamCombination[] getMandatoryMaximumResolutionStreamCombinations() { 1605 if (!isUltraHighResolutionSensor()) { 1606 return null; 1607 } 1608 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_MAX_RESOLUTION); 1609 } 1610 getMandatoryStreamCombinations()1611 private MandatoryStreamCombination[] getMandatoryStreamCombinations() { 1612 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_DEFAULT); 1613 } 1614 getMandatoryUseCaseStreamCombinations()1615 private MandatoryStreamCombination[] getMandatoryUseCaseStreamCombinations() { 1616 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_USE_CASE); 1617 } 1618 getMandatoryPreviewStabilizationStreamCombinations()1619 private MandatoryStreamCombination[] getMandatoryPreviewStabilizationStreamCombinations() { 1620 return getMandatoryStreamCombinationsHelper( 1621 MANDATORY_STREAM_CONFIGURATIONS_PREVIEW_STABILIZATION); 1622 } 1623 getStreamConfigurationMap()1624 private StreamConfigurationMap getStreamConfigurationMap() { 1625 StreamConfiguration[] configurations = getBase( 1626 CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS); 1627 StreamConfigurationDuration[] minFrameDurations = getBase( 1628 CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS); 1629 StreamConfigurationDuration[] 
stallDurations = getBase( 1630 CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS); 1631 StreamConfiguration[] depthConfigurations = getBase( 1632 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS); 1633 StreamConfigurationDuration[] depthMinFrameDurations = getBase( 1634 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS); 1635 StreamConfigurationDuration[] depthStallDurations = getBase( 1636 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS); 1637 StreamConfiguration[] dynamicDepthConfigurations = getBase( 1638 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS); 1639 StreamConfigurationDuration[] dynamicDepthMinFrameDurations = getBase( 1640 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS); 1641 StreamConfigurationDuration[] dynamicDepthStallDurations = getBase( 1642 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS); 1643 StreamConfiguration[] heicConfigurations = getBase( 1644 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS); 1645 StreamConfigurationDuration[] heicMinFrameDurations = getBase( 1646 CameraCharacteristics.HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS); 1647 StreamConfigurationDuration[] heicStallDurations = getBase( 1648 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STALL_DURATIONS); 1649 StreamConfiguration[] heicUltraHDRConfigurations = null; 1650 StreamConfigurationDuration[] heicUltraHDRMinFrameDurations = null; 1651 StreamConfigurationDuration[] heicUltraHDRStallDurations = null; 1652 if (Flags.cameraHeifGainmap()) { 1653 heicUltraHDRConfigurations = getBase( 1654 CameraCharacteristics.HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS); 1655 heicUltraHDRMinFrameDurations = getBase( 1656 CameraCharacteristics.HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS); 1657 heicUltraHDRStallDurations = getBase( 1658 CameraCharacteristics.HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS); 1659 } 1660 StreamConfiguration[] jpegRConfigurations 
= getBase( 1661 CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS); 1662 StreamConfigurationDuration[] jpegRMinFrameDurations = getBase( 1663 CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS); 1664 StreamConfigurationDuration[] jpegRStallDurations = getBase( 1665 CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS); 1666 HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase( 1667 CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS); 1668 ReprocessFormatsMap inputOutputFormatsMap = getBase( 1669 CameraCharacteristics.SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP); 1670 boolean listHighResolution = isBurstSupported(); 1671 return new StreamConfigurationMap( 1672 configurations, minFrameDurations, stallDurations, 1673 depthConfigurations, depthMinFrameDurations, depthStallDurations, 1674 dynamicDepthConfigurations, dynamicDepthMinFrameDurations, 1675 dynamicDepthStallDurations, heicConfigurations, 1676 heicMinFrameDurations, heicStallDurations, 1677 jpegRConfigurations, jpegRMinFrameDurations, jpegRStallDurations, 1678 heicUltraHDRConfigurations, heicUltraHDRMinFrameDurations, 1679 heicUltraHDRStallDurations, highSpeedVideoConfigurations, inputOutputFormatsMap, 1680 listHighResolution); 1681 } 1682 getSharedSessionConfiguration()1683 private SharedSessionConfiguration getSharedSessionConfiguration() { 1684 if (!Flags.cameraMultiClient()) { 1685 return null; 1686 } 1687 Integer sharedSessionColorSpace = getBase( 1688 CameraCharacteristics.SHARED_SESSION_COLOR_SPACE); 1689 long[] sharedOutputConfigurations = getBase( 1690 CameraCharacteristics.SHARED_SESSION_OUTPUT_CONFIGURATIONS); 1691 1692 if ((sharedSessionColorSpace == null) || (sharedOutputConfigurations == null)) { 1693 return null; 1694 } 1695 1696 return new SharedSessionConfiguration(sharedSessionColorSpace, sharedOutputConfigurations); 1697 } 1698 getStreamConfigurationMapMaximumResolution()1699 private StreamConfigurationMap 
getStreamConfigurationMapMaximumResolution() { 1700 StreamConfiguration[] configurations = getBase( 1701 CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1702 StreamConfigurationDuration[] minFrameDurations = getBase( 1703 CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1704 StreamConfigurationDuration[] stallDurations = getBase( 1705 CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1706 // If the at least these keys haven't been advertised, there cannot be a meaningful max 1707 // resolution StreamConfigurationMap 1708 if (configurations == null || 1709 minFrameDurations == null || 1710 stallDurations == null) { 1711 return null; 1712 } 1713 1714 StreamConfiguration[] depthConfigurations = getBase( 1715 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1716 StreamConfigurationDuration[] depthMinFrameDurations = getBase( 1717 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1718 StreamConfigurationDuration[] depthStallDurations = getBase( 1719 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1720 StreamConfiguration[] dynamicDepthConfigurations = getBase( 1721 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1722 StreamConfigurationDuration[] dynamicDepthMinFrameDurations = getBase( 1723 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1724 StreamConfigurationDuration[] dynamicDepthStallDurations = getBase( 1725 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1726 StreamConfiguration[] heicConfigurations = getBase( 1727 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1728 StreamConfigurationDuration[] heicMinFrameDurations = getBase( 1729 
CameraCharacteristics.HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1730 StreamConfigurationDuration[] heicStallDurations = getBase( 1731 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1732 StreamConfiguration[] heicUltraHDRConfigurations = null; 1733 StreamConfigurationDuration[] heicUltraHDRMinFrameDurations = null; 1734 StreamConfigurationDuration[] heicUltraHDRStallDurations = null; 1735 if (Flags.cameraHeifGainmap()) { 1736 heicUltraHDRConfigurations = getBase( 1737 CameraCharacteristics.HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1738 heicUltraHDRMinFrameDurations = getBase( 1739 CameraCharacteristics.HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1740 heicUltraHDRStallDurations = getBase( 1741 CameraCharacteristics.HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1742 } 1743 StreamConfiguration[] jpegRConfigurations = getBase( 1744 CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1745 StreamConfigurationDuration[] jpegRMinFrameDurations = getBase( 1746 CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1747 StreamConfigurationDuration[] jpegRStallDurations = getBase( 1748 CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1749 HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase( 1750 CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1751 ReprocessFormatsMap inputOutputFormatsMap = getBase( 1752 CameraCharacteristics.SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION); 1753 // TODO: Is this correct, burst capability shouldn't necessarily correspond to max res mode 1754 boolean listHighResolution = isBurstSupported(); 1755 return new StreamConfigurationMap( 1756 configurations, minFrameDurations, stallDurations, 1757 depthConfigurations, depthMinFrameDurations, 
depthStallDurations, 1758 dynamicDepthConfigurations, dynamicDepthMinFrameDurations, 1759 dynamicDepthStallDurations, heicConfigurations, 1760 heicMinFrameDurations, heicStallDurations, 1761 jpegRConfigurations, jpegRMinFrameDurations, jpegRStallDurations, 1762 heicUltraHDRConfigurations, heicUltraHDRMinFrameDurations, 1763 heicUltraHDRStallDurations, highSpeedVideoConfigurations, inputOutputFormatsMap, 1764 listHighResolution, false); 1765 } 1766 getMaxRegions(Key<T> key)1767 private <T> Integer getMaxRegions(Key<T> key) { 1768 final int AE = 0; 1769 final int AWB = 1; 1770 final int AF = 2; 1771 1772 // The order of the elements is: (AE, AWB, AF) 1773 int[] maxRegions = getBase(CameraCharacteristics.CONTROL_MAX_REGIONS); 1774 1775 if (maxRegions == null) { 1776 return null; 1777 } 1778 1779 if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AE)) { 1780 return maxRegions[AE]; 1781 } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB)) { 1782 return maxRegions[AWB]; 1783 } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AF)) { 1784 return maxRegions[AF]; 1785 } else { 1786 throw new AssertionError("Invalid key " + key); 1787 } 1788 } 1789 getMaxNumOutputs(Key<T> key)1790 private <T> Integer getMaxNumOutputs(Key<T> key) { 1791 final int RAW = 0; 1792 final int PROC = 1; 1793 final int PROC_STALLING = 2; 1794 1795 // The order of the elements is: (raw, proc+nonstalling, proc+stalling) 1796 int[] maxNumOutputs = getBase(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS); 1797 1798 if (maxNumOutputs == null) { 1799 return null; 1800 } 1801 1802 if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW)) { 1803 return maxNumOutputs[RAW]; 1804 } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC)) { 1805 return maxNumOutputs[PROC]; 1806 } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING)) { 1807 return maxNumOutputs[PROC_STALLING]; 1808 } else { 1809 throw new 
AssertionError("Invalid key " + key); 1810 } 1811 } 1812 getTonemapCurve()1813 private <T> TonemapCurve getTonemapCurve() { 1814 float[] red = getBase(CaptureRequest.TONEMAP_CURVE_RED); 1815 float[] green = getBase(CaptureRequest.TONEMAP_CURVE_GREEN); 1816 float[] blue = getBase(CaptureRequest.TONEMAP_CURVE_BLUE); 1817 1818 if (areValuesAllNull(red, green, blue)) { 1819 return null; 1820 } 1821 1822 if (red == null || green == null || blue == null) { 1823 Log.w(TAG, "getTonemapCurve - missing tone curve components"); 1824 return null; 1825 } 1826 TonemapCurve tc = new TonemapCurve(red, green, blue); 1827 return tc; 1828 } 1829 getOisSamples()1830 private OisSample[] getOisSamples() { 1831 long[] timestamps = getBase(CaptureResult.STATISTICS_OIS_TIMESTAMPS); 1832 float[] xShifts = getBase(CaptureResult.STATISTICS_OIS_X_SHIFTS); 1833 float[] yShifts = getBase(CaptureResult.STATISTICS_OIS_Y_SHIFTS); 1834 1835 if (timestamps == null) { 1836 if (xShifts != null) { 1837 throw new AssertionError("timestamps is null but xShifts is not"); 1838 } 1839 1840 if (yShifts != null) { 1841 throw new AssertionError("timestamps is null but yShifts is not"); 1842 } 1843 1844 return null; 1845 } 1846 1847 if (xShifts == null) { 1848 throw new AssertionError("timestamps is not null but xShifts is"); 1849 } 1850 1851 if (yShifts == null) { 1852 throw new AssertionError("timestamps is not null but yShifts is"); 1853 } 1854 1855 if (xShifts.length != timestamps.length) { 1856 throw new AssertionError(String.format( 1857 "timestamps has %d entries but xShifts has %d", timestamps.length, 1858 xShifts.length)); 1859 } 1860 1861 if (yShifts.length != timestamps.length) { 1862 throw new AssertionError(String.format( 1863 "timestamps has %d entries but yShifts has %d", timestamps.length, 1864 yShifts.length)); 1865 } 1866 1867 OisSample[] samples = new OisSample[timestamps.length]; 1868 for (int i = 0; i < timestamps.length; i++) { 1869 samples[i] = new OisSample(timestamps[i], xShifts[i], 
yShifts[i]); 1870 } 1871 return samples; 1872 } 1873 setLensIntrinsicsSamples(LensIntrinsicsSample[] samples)1874 private boolean setLensIntrinsicsSamples(LensIntrinsicsSample[] samples) { 1875 if (samples == null) { 1876 return false; 1877 } 1878 1879 long[] tsArray = new long[samples.length]; 1880 float[] intrinsicsArray = new float[samples.length * 5]; 1881 for (int i = 0; i < samples.length; i++) { 1882 tsArray[i] = samples[i].getTimestampNanos(); 1883 System.arraycopy(samples[i].getLensIntrinsics(), 0, intrinsicsArray, 5 * i, 5); 1884 1885 } 1886 setBase(CaptureResult.STATISTICS_LENS_INTRINSIC_SAMPLES, intrinsicsArray); 1887 setBase(CaptureResult.STATISTICS_LENS_INTRINSIC_TIMESTAMPS, tsArray); 1888 1889 return true; 1890 } 1891 getLensIntrinsicSamples()1892 private LensIntrinsicsSample[] getLensIntrinsicSamples() { 1893 long[] timestamps = getBase(CaptureResult.STATISTICS_LENS_INTRINSIC_TIMESTAMPS); 1894 float[] intrinsics = getBase(CaptureResult.STATISTICS_LENS_INTRINSIC_SAMPLES); 1895 1896 if (timestamps == null) { 1897 if (intrinsics != null) { 1898 throw new AssertionError("timestamps is null but intrinsics is not"); 1899 } 1900 1901 return null; 1902 } 1903 1904 if (intrinsics == null) { 1905 throw new AssertionError("timestamps is not null but intrinsics is"); 1906 } else if ((intrinsics.length % 5) != 0) { 1907 throw new AssertionError("intrinsics are not multiple of 5"); 1908 } 1909 1910 if ((intrinsics.length / 5) != timestamps.length) { 1911 throw new AssertionError(String.format( 1912 "timestamps has %d entries but intrinsics has %d", timestamps.length, 1913 intrinsics.length / 5)); 1914 } 1915 1916 LensIntrinsicsSample[] samples = new LensIntrinsicsSample[timestamps.length]; 1917 for (int i = 0; i < timestamps.length; i++) { 1918 float[] currentIntrinsic = Arrays.copyOfRange(intrinsics, 5 * i, 5 * i + 5); 1919 samples[i] = new LensIntrinsicsSample(timestamps[i], currentIntrinsic); 1920 } 1921 return samples; 1922 } 1923 
    // Parse CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES ([mode, width, height] triples)
    // and the matching zoom-ratio ranges into Capability objects. The DISABLED mode has no
    // zoom-range entry, which is why the zoom-range list is expected to be exactly one
    // shorter than the mode list.
    private Capability[] getExtendedSceneModeCapabilities() {
        int[] maxSizes =
                getBase(CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES);
        float[] zoomRanges = getBase(
                CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES);
        Range<Float> zoomRange = getBase(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE);
        // NOTE(review): unboxing here NPEs if SCALER_AVAILABLE_MAX_DIGITAL_ZOOM is absent —
        // presumably the key is mandatory on all devices; confirm before relying on it.
        float maxDigitalZoom = getBase(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);

        if (maxSizes == null) {
            return null;
        }
        if (maxSizes.length % 3 != 0) {
            throw new AssertionError("availableExtendedSceneModeMaxSizes must be tuples of "
                    + "[mode, width, height]");
        }
        int numExtendedSceneModes = maxSizes.length / 3;
        int numExtendedSceneModeZoomRanges = 0;
        if (zoomRanges != null) {
            if (zoomRanges.length % 2 != 0) {
                throw new AssertionError("availableExtendedSceneModeZoomRanges must be tuples of "
                        + "[minZoom, maxZoom]");
            }
            numExtendedSceneModeZoomRanges = zoomRanges.length / 2;
            if (numExtendedSceneModes - numExtendedSceneModeZoomRanges != 1) {
                throw new AssertionError("Number of extended scene mode zoom ranges must be 1 "
                        + "less than number of supported modes");
            }
        }

        // Fallback zoom range used for the DISABLED mode (and when zoom ranges run out).
        float modeOffMinZoomRatio = 1.0f;
        float modeOffMaxZoomRatio = maxDigitalZoom;
        if (zoomRange != null) {
            modeOffMinZoomRatio = zoomRange.getLower();
            modeOffMaxZoomRatio = zoomRange.getUpper();
        }

        Capability[] capabilities = new Capability[numExtendedSceneModes];
        // j indexes zoom ranges and only advances for non-DISABLED modes.
        for (int i = 0, j = 0; i < numExtendedSceneModes; i++) {
            int mode = maxSizes[3 * i];
            int width = maxSizes[3 * i + 1];
            int height = maxSizes[3 * i + 2];
            if (mode != CameraMetadata.CONTROL_EXTENDED_SCENE_MODE_DISABLED
                    && j < numExtendedSceneModeZoomRanges) {
                capabilities[i] = new Capability(mode, new Size(width, height),
                        new Range<Float>(zoomRanges[2 * j], zoomRanges[2 * j + 1]));
                j++;
            } else {
                capabilities[i] = new Capability(mode, new Size(width, height),
                        new Range<Float>(modeOffMinZoomRatio, modeOffMaxZoomRatio));
            }
        }

        return capabilities;
    }

    // The three typed-key overloads below all funnel into setBase(Key<T>, T) via the
    // underlying native key.
    private <T> void setBase(CameraCharacteristics.Key<T> key, T value) {
        setBase(key.getNativeKey(), value);
    }

    private <T> void setBase(CaptureResult.Key<T> key, T value) {
        setBase(key.getNativeKey(), value);
    }

    private <T> void setBase(CaptureRequest.Key<T> key, T value) {
        setBase(key.getNativeKey(), value);
    }

    // The whole method needs to be synchronized since we're making
    // multiple calls to the native layer. From one call to the other (within setBase)
    // we expect the metadata's properties such as vendor id etc to
    // stay the same and as a result the whole method should be synchronized for safety.
    private synchronized <T> void setBase(Key<T> key, T value) {
        int tag, nativeType;
        if (key.hasTag()) {
            tag = key.getTag();
        } else {
            // First use of this key: resolve the tag via native and cache it on the key.
            tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.getName());
            key.cacheTag(tag);
        }
        if (value == null) {
            // Erase the entry
            writeValues(tag, /*src*/null);
            return;
        } // else update the entry to a new value

        nativeType = nativeGetTypeFromTagLocal(mMetadataPtr, tag);
        Marshaler<T> marshaler = getMarshalerForKey(key, nativeType);
        int size = marshaler.calculateMarshalSize(value);

        // TODO: Optimization. Cache the byte[] and reuse if the size is big enough.
        byte[] values = new byte[size];

        // Marshal into a native-order buffer backing the byte array, then push to native.
        ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder());
        marshaler.marshal(value, buffer);

        writeValues(tag, values);
    }

    // Use Command pattern here to avoid lots of expensive if/equals checks in get for overridden
    // metadata.
    private static final HashMap<Key<?>, SetCommand> sSetCommandMap =
            new HashMap<Key<?>, SetCommand>();
    // Each entry routes a key with non-trivial set semantics to its dedicated setter below
    // instead of the generic setBase() path.
    static {
        sSetCommandMap.put(CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey(),
                new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setAvailableFormats((int[]) value);
            }
        });
        sSetCommandMap.put(CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey(),
                new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setFaceRectangles((Rect[]) value);
            }
        });
        sSetCommandMap.put(CaptureResult.STATISTICS_FACES.getNativeKey(),
                new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setFaces((Face[])value);
            }
        });
        sSetCommandMap.put(CaptureRequest.TONEMAP_CURVE.getNativeKey(), new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setTonemapCurve((TonemapCurve) value);
            }
        });
        sSetCommandMap.put(CaptureResult.JPEG_GPS_LOCATION.getNativeKey(), new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setGpsLocation((Location) value);
            }
        });
        sSetCommandMap.put(CaptureRequest.SCALER_CROP_REGION.getNativeKey(),
                new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setScalerCropRegion((Rect) value);
            }
        });
        sSetCommandMap.put(CaptureRequest.CONTROL_AWB_REGIONS.getNativeKey(),
                new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setAWBRegions(value);
            }
        });
        sSetCommandMap.put(CaptureRequest.CONTROL_AF_REGIONS.getNativeKey(),
                new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setAFRegions(value);
            }
        });
        sSetCommandMap.put(CaptureRequest.CONTROL_AE_REGIONS.getNativeKey(),
                new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setAERegions(value);
            }
        });
        sSetCommandMap.put(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.getNativeKey(),
                new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setLensShadingMap((LensShadingMap) value);
            }
        });
        sSetCommandMap.put(
                CaptureResult.STATISTICS_LENS_INTRINSICS_SAMPLES.getNativeKey(),
                new SetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> void setValue(CameraMetadataNative metadata, T value) {
                        metadata.setLensIntrinsicsSamples((LensIntrinsicsSample []) value);
                    }
                });
    }

    // Rewrite JPEG entries to the native JPEG format code before storing; returns false on
    // null input so setBase() performs the erasure instead.
    private boolean setAvailableFormats(int[] value) {
        int[] availableFormat = value;
        if (value == null) {
            // Let setBase() to handle the null value case.
            return false;
        }

        int[] newValues = new int[availableFormat.length];
        for (int i = 0; i < availableFormat.length; i++) {
            newValues[i] = availableFormat[i];
            if (availableFormat[i] == ImageFormat.JPEG) {
                newValues[i] = NATIVE_JPEG_FORMAT;
            }
        }

        setBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS, newValues);
        return true;
    }

    /**
     * Convert Face Rectangles from managed side to native side as they have different definitions.
     * <p>
     * Managed side face rectangles are defined as: left, top, width, height.
     * Native side face rectangles are defined as: left, top, right, bottom.
     * The input face rectangle need to be converted to native side definition when set is called.
     * </p>
     *
     * @param faceRects Input face rectangles.
     * @return true if face rectangles can be set successfully. Otherwise, Let the caller
     *             (setBase) to handle it appropriately.
     */
    private boolean setFaceRectangles(Rect[] faceRects) {
        if (faceRects == null) {
            return false;
        }

        Rect[] newFaceRects = new Rect[faceRects.length];
        for (int i = 0; i < newFaceRects.length; i++) {
            // Per the managed-side convention above, 'right'/'bottom' here hold width/height,
            // so right = left + width and bottom = top + height in the converted rect.
            newFaceRects[i] = new Rect(
                    faceRects[i].left,
                    faceRects[i].top,
                    faceRects[i].right + faceRects[i].left,
                    faceRects[i].bottom + faceRects[i].top);
        }

        setBase(CaptureResult.STATISTICS_FACE_RECTANGLES, newFaceRects);
        return true;
    }

    // Split a TonemapCurve back into its three per-channel point arrays and store each under
    // its own key; returns false on null so setBase() handles the erasure.
    private <T> boolean setTonemapCurve(TonemapCurve tc) {
        if (tc == null) {
            return false;
        }

        float[][] curve = new float[3][];
        for (int i = TonemapCurve.CHANNEL_RED; i <= TonemapCurve.CHANNEL_BLUE; i++) {
            int pointCount = tc.getPointCount(i);
            curve[i] = new float[pointCount * TonemapCurve.POINT_SIZE];
            tc.copyColorCurve(i, curve[i], 0);
        }
        setBase(CaptureRequest.TONEMAP_CURVE_RED, curve[0]);
        setBase(CaptureRequest.TONEMAP_CURVE_GREEN, curve[1]);
        setBase(CaptureRequest.TONEMAP_CURVE_BLUE, curve[2]);

        return true;
    }

    // Stores the crop region and flips the companion *_SET sentinel key so readers can tell
    // an explicit setting apart from a default.
    private <T> boolean setScalerCropRegion(Rect cropRegion) {
        if (cropRegion == null) {
            return false;
        }
        setBase(CaptureRequest.SCALER_CROP_REGION_SET, true);
        setBase(CaptureRequest.SCALER_CROP_REGION, cropRegion);
        return true;
    }

    // Stores AF metering regions and flips the companion *_SET sentinel key.
    private <T> boolean setAFRegions(T afRegions) {
        if (afRegions == null) {
            return false;
        }
        setBase(CaptureRequest.CONTROL_AF_REGIONS_SET, true);
        // The cast to CaptureRequest.Key is needed since java does not support template
        // specialization and we need to route this method to
        // setBase(CaptureRequest.Key<T> key, T value)
        setBase((CaptureRequest.Key)CaptureRequest.CONTROL_AF_REGIONS, afRegions);
        return true;
    }
setAERegions(T aeRegions)2194 private <T> boolean setAERegions(T aeRegions) { 2195 if (aeRegions == null) { 2196 return false; 2197 } 2198 setBase(CaptureRequest.CONTROL_AE_REGIONS_SET, true); 2199 setBase((CaptureRequest.Key)CaptureRequest.CONTROL_AE_REGIONS, aeRegions); 2200 return true; 2201 } 2202 setAWBRegions(T awbRegions)2203 private <T> boolean setAWBRegions(T awbRegions) { 2204 if (awbRegions == null) { 2205 return false; 2206 } 2207 setBase(CaptureRequest.CONTROL_AWB_REGIONS_SET, true); 2208 setBase((CaptureRequest.Key)CaptureRequest.CONTROL_AWB_REGIONS, awbRegions); 2209 return true; 2210 } 2211 updateNativeAllocation()2212 private synchronized void updateNativeAllocation() { 2213 long currentBufferSize = nativeGetBufferSize(mMetadataPtr); 2214 2215 if (currentBufferSize != mBufferSize) { 2216 if (mBufferSize > 0) { 2217 VMRuntime.getRuntime().registerNativeFree(mBufferSize); 2218 } 2219 2220 mBufferSize = currentBufferSize; 2221 2222 if (mBufferSize > 0) { 2223 VMRuntime.getRuntime().registerNativeAllocation(mBufferSize); 2224 } 2225 } 2226 } 2227 2228 private int mCameraId = -1; 2229 private boolean mHasMandatoryConcurrentStreams = false; 2230 private Size mDisplaySize = new Size(0, 0); 2231 private long mBufferSize = 0; 2232 private MultiResolutionStreamConfigurationMap mMultiResolutionStreamConfigurationMap = null; 2233 2234 /** 2235 * Set the current camera Id. 2236 * 2237 * @param cameraId Current camera id. 2238 * 2239 * @hide 2240 */ setCameraId(int cameraId)2241 public void setCameraId(int cameraId) { 2242 mCameraId = cameraId; 2243 } 2244 2245 /** 2246 * Set the current camera Id. 2247 * 2248 * @param hasMandatoryConcurrentStreams whether the metadata advertises mandatory concurrent 2249 * streams. 
2250 * 2251 * @hide 2252 */ setHasMandatoryConcurrentStreams(boolean hasMandatoryConcurrentStreams)2253 public void setHasMandatoryConcurrentStreams(boolean hasMandatoryConcurrentStreams) { 2254 mHasMandatoryConcurrentStreams = hasMandatoryConcurrentStreams; 2255 } 2256 2257 /** 2258 * Set the current display size. 2259 * 2260 * @param displaySize The current display size. 2261 * 2262 * @hide 2263 */ setDisplaySize(Size displaySize)2264 public void setDisplaySize(Size displaySize) { 2265 mDisplaySize = displaySize; 2266 } 2267 2268 /** 2269 * Set the multi-resolution stream configuration map. 2270 * 2271 * @param multiResolutionMap The multi-resolution stream configuration map. 2272 * 2273 * @hide 2274 */ setMultiResolutionStreamConfigurationMap( @onNull Map<String, StreamConfiguration[]> multiResolutionMap)2275 public void setMultiResolutionStreamConfigurationMap( 2276 @NonNull Map<String, StreamConfiguration[]> multiResolutionMap) { 2277 mMultiResolutionStreamConfigurationMap = 2278 new MultiResolutionStreamConfigurationMap(multiResolutionMap); 2279 } 2280 2281 /** 2282 * Get the multi-resolution stream configuration map. 2283 * 2284 * @return The multi-resolution stream configuration map. 2285 * 2286 * @hide 2287 */ getMultiResolutionStreamConfigurationMap()2288 public MultiResolutionStreamConfigurationMap getMultiResolutionStreamConfigurationMap() { 2289 return mMultiResolutionStreamConfigurationMap; 2290 } 2291 2292 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) 2293 private long mMetadataPtr; // native std::shared_ptr<CameraMetadata>* 2294 2295 // FastNative doesn't work with synchronized methods and we can do synchronization 2296 // wherever needed in the java layer (caller). At some places in java such as 2297 // setBase() / getBase(), we do need to synchronize the whole method, so leaving 2298 // synchronized out for these native methods. 
2299 2300 @FastNative nativeAllocate()2301 private static native long nativeAllocate(); 2302 @FastNative nativeAllocateCopy(long ptr)2303 private static native long nativeAllocateCopy(long ptr) 2304 throws NullPointerException; 2305 2306 2307 @FastNative nativeUpdate(long dst, long src)2308 private static native void nativeUpdate(long dst, long src); 2309 @FastNative nativeWriteToParcel(Parcel dest, long ptr)2310 private static native void nativeWriteToParcel(Parcel dest, long ptr); 2311 @FastNative nativeReadFromParcel(Parcel source, long ptr)2312 private static native void nativeReadFromParcel(Parcel source, long ptr); 2313 @FastNative nativeSwap(long ptr, long otherPtr)2314 private static native void nativeSwap(long ptr, long otherPtr) 2315 throws NullPointerException; 2316 @FastNative nativeSetVendorId(long ptr, long vendorId)2317 private static native void nativeSetVendorId(long ptr, long vendorId); 2318 @FastNative nativeClose(long ptr)2319 private static native void nativeClose(long ptr); 2320 @FastNative nativeIsEmpty(long ptr)2321 private static native boolean nativeIsEmpty(long ptr); 2322 @FastNative nativeGetEntryCount(long ptr)2323 private static native int nativeGetEntryCount(long ptr); 2324 @FastNative nativeGetBufferSize(long ptr)2325 private static native long nativeGetBufferSize(long ptr); 2326 2327 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) 2328 @FastNative nativeReadValues(int tag, long ptr)2329 private static native byte[] nativeReadValues(int tag, long ptr); 2330 @FastNative nativeWriteValues(int tag, byte[] src, long ptr)2331 private static native void nativeWriteValues(int tag, byte[] src, long ptr); 2332 @FastNative nativeDump(long ptr)2333 private static native void nativeDump(long ptr) throws IOException; // dump to LOGD 2334 2335 @FastNative nativeGetAllVendorKeys(long ptr, Class keyClass)2336 private static native ArrayList nativeGetAllVendorKeys(long ptr, Class keyClass); 2337 
    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    @FastNative
    private static native int nativeGetTagFromKeyLocal(long ptr, String keyName)
            throws IllegalArgumentException;
    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    @FastNative
    private static native int nativeGetTypeFromTagLocal(long ptr, int tag)
            throws IllegalArgumentException;
    @FastNative
    private static native int nativeGetTagFromKey(String keyName, long vendorId)
            throws IllegalArgumentException;
    @FastNative
    private static native int nativeGetTypeFromTag(int tag, long vendorId)
            throws IllegalArgumentException;

    /**
     * <p>Perform a 0-copy swap of the internal metadata with another object.</p>
     *
     * <p>Useful to convert a CameraMetadata into e.g. a CaptureRequest.</p>
     *
     * <p>Swaps the native buffers first, then exchanges the Java-side cached fields so
     * both objects stay internally consistent, and finally refreshes the native
     * allocation accounting on both sides.</p>
     *
     * @param other Metadata to swap with
     * @throws NullPointerException if other was null
     * @hide
     */
    public synchronized void swap(CameraMetadataNative other) {
        nativeSwap(mMetadataPtr, other.mMetadataPtr);
        // Note: Java-side caches are copied from other, not swapped — this mirrors the
        // original behavior; only the native buffers are truly exchanged.
        mCameraId = other.mCameraId;
        mHasMandatoryConcurrentStreams = other.mHasMandatoryConcurrentStreams;
        mDisplaySize = other.mDisplaySize;
        mMultiResolutionStreamConfigurationMap = other.mMultiResolutionStreamConfigurationMap;
        updateNativeAllocation();
        other.updateNativeAllocation();
    }

    /**
     * Set the native metadata vendor id.
2373 * 2374 * @hide 2375 */ setVendorId(long vendorId)2376 public synchronized void setVendorId(long vendorId) { 2377 nativeSetVendorId(mMetadataPtr, vendorId); 2378 } 2379 2380 /** 2381 * @hide 2382 */ getEntryCount()2383 public synchronized int getEntryCount() { 2384 return nativeGetEntryCount(mMetadataPtr); 2385 } 2386 2387 /** 2388 * Does this metadata contain at least 1 entry? 2389 * 2390 * @hide 2391 */ isEmpty()2392 public synchronized boolean isEmpty() { 2393 return nativeIsEmpty(mMetadataPtr); 2394 } 2395 2396 2397 /** 2398 * Retrieves the pointer to the native shared_ptr<CameraMetadata> as a Java long. 2399 * 2400 * @hide 2401 */ getMetadataPtr()2402 public long getMetadataPtr() { 2403 return mMetadataPtr; 2404 } 2405 2406 /** 2407 * Return a list containing keys of the given key class for all defined vendor tags. 2408 * 2409 * @hide 2410 */ getAllVendorKeys(Class<K> keyClass)2411 public synchronized <K> ArrayList<K> getAllVendorKeys(Class<K> keyClass) { 2412 if (keyClass == null) { 2413 throw new NullPointerException(); 2414 } 2415 return (ArrayList<K>) nativeGetAllVendorKeys(mMetadataPtr, keyClass); 2416 } 2417 2418 /** 2419 * Convert a key string into the equivalent native tag. 2420 * 2421 * @throws IllegalArgumentException if the key was not recognized 2422 * @throws NullPointerException if the key was null 2423 * 2424 * @hide 2425 */ getTag(String key)2426 public static int getTag(String key) { 2427 return nativeGetTagFromKey(key, Long.MAX_VALUE); 2428 } 2429 2430 /** 2431 * Convert a key string into the equivalent native tag. 2432 * 2433 * @throws IllegalArgumentException if the key was not recognized 2434 * @throws NullPointerException if the key was null 2435 * 2436 * @hide 2437 */ getTag(String key, long vendorId)2438 public static int getTag(String key, long vendorId) { 2439 return nativeGetTagFromKey(key, vendorId); 2440 } 2441 2442 /** 2443 * Get the underlying native type for a tag. 2444 * 2445 * @param tag An integer tag, see e.g. 
{@link #getTag}
     * @param vendorId A vendor tag provider id
     * @return An int enum for the metadata type, see e.g. {@link #TYPE_BYTE}
     *
     * @hide
     */
    public static int getNativeType(int tag, long vendorId) {
        return nativeGetTypeFromTag(tag, vendorId);
    }

    /**
     * <p>Updates the existing entry for tag with the new bytes pointed by src, erasing
     * the entry if src was null.</p>
     *
     * <p>An empty array can be passed in to update the entry to 0 elements.</p>
     *
     * <p>The bytes are interpreted natively; use a marshaler to serialize managed
     * values into the expected wire layout for the tag's native type.</p>
     *
     * @param tag An integer tag, see e.g. {@link #getTag}
     * @param src An array of bytes, or null to erase the entry
     *
     * @hide
     */
    public synchronized void writeValues(int tag, byte[] src) {
        nativeWriteValues(tag, src, mMetadataPtr);
    }

    /**
     * <p>Returns a byte[] of data corresponding to this tag. Use a wrapped bytebuffer to unserialize
     * the data properly.</p>
     *
     * <p>An empty array can be returned to denote an existing entry with 0 elements.</p>
     *
     * @param tag An integer tag, see e.g. {@link #getTag}
     *
     * @return {@code null} if there were 0 entries for this tag, a byte[] otherwise.
     * @hide
     */
    public synchronized byte[] readValues(int tag) {
        // TODO: Optimization. Native code returns a ByteBuffer instead.
        return nativeReadValues(tag, mMetadataPtr);
    }

    /**
     * Dumps the native metadata contents to logcat.
     *
     * <p>Visibility for testing/debugging only.
The results will not 2490 * include any synthesized keys, as they are invisible to the native layer.</p> 2491 * 2492 * @hide 2493 */ dumpToLog()2494 public synchronized void dumpToLog() { 2495 try { 2496 nativeDump(mMetadataPtr); 2497 } catch (IOException e) { 2498 Log.wtf(TAG, "Dump logging failed", e); 2499 } 2500 } 2501 2502 @Override finalize()2503 protected void finalize() throws Throwable { 2504 try { 2505 close(); 2506 } finally { 2507 super.finalize(); 2508 } 2509 } 2510 2511 /** 2512 * Get the marshaler compatible with the {@code key} and type {@code T}. 2513 * 2514 * @throws UnsupportedOperationException 2515 * if the native/managed type combination for {@code key} is not supported 2516 */ getMarshalerForKey(Key<T> key, int nativeType)2517 private static <T> Marshaler<T> getMarshalerForKey(Key<T> key, int nativeType) { 2518 return MarshalRegistry.getMarshaler(key.getTypeReference(), 2519 nativeType); 2520 } 2521 2522 @SuppressWarnings({ "unchecked", "rawtypes" }) registerAllMarshalers()2523 private static void registerAllMarshalers() { 2524 if (DEBUG) { 2525 Log.v(TAG, "Shall register metadata marshalers"); 2526 } 2527 2528 MarshalQueryable[] queryList = new MarshalQueryable[] { 2529 // marshalers for standard types 2530 new MarshalQueryablePrimitive(), 2531 new MarshalQueryableEnum(), 2532 new MarshalQueryableArray(), 2533 2534 // pseudo standard types, that expand/narrow the native type into a managed type 2535 new MarshalQueryableBoolean(), 2536 new MarshalQueryableNativeByteToInteger(), 2537 2538 // marshalers for custom types 2539 new MarshalQueryableRect(), 2540 new MarshalQueryableSize(), 2541 new MarshalQueryableSizeF(), 2542 new MarshalQueryableString(), 2543 new MarshalQueryableReprocessFormatsMap(), 2544 new MarshalQueryableRange(), 2545 new MarshalQueryablePair(), 2546 new MarshalQueryableMeteringRectangle(), 2547 new MarshalQueryableColorSpaceTransform(), 2548 new MarshalQueryableStreamConfiguration(), 2549 new 
MarshalQueryableStreamConfigurationDuration(), 2550 new MarshalQueryableRggbChannelVector(), 2551 new MarshalQueryableBlackLevelPattern(), 2552 new MarshalQueryableHighSpeedVideoConfiguration(), 2553 new MarshalQueryableRecommendedStreamConfiguration(), 2554 2555 // generic parcelable marshaler (MUST BE LAST since it has lowest priority) 2556 new MarshalQueryableParcelable(), 2557 }; 2558 2559 for (MarshalQueryable query : queryList) { 2560 MarshalRegistry.registerMarshalQueryable(query); 2561 } 2562 if (DEBUG) { 2563 Log.v(TAG, "Registered metadata marshalers"); 2564 } 2565 } 2566 2567 /** Check if input arguments are all {@code null}. 2568 * 2569 * @param objs Input arguments for null check 2570 * @return {@code true} if input arguments are all {@code null}, otherwise {@code false} 2571 */ areValuesAllNull(Object... objs)2572 private static boolean areValuesAllNull(Object... objs) { 2573 for (Object o : objs) { 2574 if (o != null) return false; 2575 } 2576 return true; 2577 } 2578 2579 /** 2580 * Return the set of physical camera ids that this logical {@link CameraDevice} is made 2581 * up of. 2582 * 2583 * If the camera device isn't a logical camera, return an empty set. 
2584 * 2585 * @hide 2586 */ getPhysicalCameraIds()2587 public Set<String> getPhysicalCameraIds() { 2588 int[] availableCapabilities = get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 2589 if (availableCapabilities == null) { 2590 throw new AssertionError("android.request.availableCapabilities must be non-null " 2591 + "in the characteristics"); 2592 } 2593 2594 if (!ArrayUtils.contains(availableCapabilities, 2595 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA)) { 2596 return Collections.emptySet(); 2597 } 2598 byte[] physicalCamIds = get(CameraCharacteristics.LOGICAL_MULTI_CAMERA_PHYSICAL_IDS); 2599 2600 String physicalCamIdString = null; 2601 try { 2602 physicalCamIdString = new String(physicalCamIds, "UTF-8"); 2603 } catch (java.io.UnsupportedEncodingException e) { 2604 throw new AssertionError("android.logicalCam.physicalIds must be UTF-8 string"); 2605 } 2606 String[] physicalCameraIdArray = physicalCamIdString.split("\0"); 2607 2608 return Collections.unmodifiableSet( 2609 new HashSet<String>(Arrays.asList(physicalCameraIdArray))); 2610 } 2611 2612 static { registerAllMarshalers()2613 registerAllMarshalers(); 2614 } 2615 } 2616