/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.impl;

import android.annotation.UnsupportedAppUsage;
import android.graphics.ImageFormat;
import android.graphics.Point;
import android.graphics.Rect;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.marshal.MarshalQueryable;
import android.hardware.camera2.marshal.MarshalRegistry;
import android.hardware.camera2.marshal.Marshaler;
import android.hardware.camera2.marshal.impl.MarshalQueryableArray;
import android.hardware.camera2.marshal.impl.MarshalQueryableBlackLevelPattern;
import android.hardware.camera2.marshal.impl.MarshalQueryableBoolean;
import android.hardware.camera2.marshal.impl.MarshalQueryableColorSpaceTransform;
import android.hardware.camera2.marshal.impl.MarshalQueryableEnum;
import android.hardware.camera2.marshal.impl.MarshalQueryableHighSpeedVideoConfiguration;
import android.hardware.camera2.marshal.impl.MarshalQueryableMeteringRectangle;
import android.hardware.camera2.marshal.impl.MarshalQueryableNativeByteToInteger;
import android.hardware.camera2.marshal.impl.MarshalQueryablePair;
import android.hardware.camera2.marshal.impl.MarshalQueryableParcelable;
import android.hardware.camera2.marshal.impl.MarshalQueryablePrimitive;
import android.hardware.camera2.marshal.impl.MarshalQueryableRange;
import android.hardware.camera2.marshal.impl.MarshalQueryableRecommendedStreamConfiguration;
import android.hardware.camera2.marshal.impl.MarshalQueryableRect;
import android.hardware.camera2.marshal.impl.MarshalQueryableReprocessFormatsMap;
import android.hardware.camera2.marshal.impl.MarshalQueryableRggbChannelVector;
import android.hardware.camera2.marshal.impl.MarshalQueryableSize;
import android.hardware.camera2.marshal.impl.MarshalQueryableSizeF;
import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfiguration;
import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfigurationDuration;
import android.hardware.camera2.marshal.impl.MarshalQueryableString;
import android.hardware.camera2.params.Face;
import android.hardware.camera2.params.HighSpeedVideoConfiguration;
import android.hardware.camera2.params.LensShadingMap;
import android.hardware.camera2.params.MandatoryStreamCombination;
import android.hardware.camera2.params.MandatoryStreamCombination.MandatoryStreamInformation;
import android.hardware.camera2.params.OisSample;
import android.hardware.camera2.params.RecommendedStreamConfiguration;
import android.hardware.camera2.params.RecommendedStreamConfigurationMap;
import android.hardware.camera2.params.ReprocessFormatsMap;
import android.hardware.camera2.params.StreamConfiguration;
import android.hardware.camera2.params.StreamConfigurationDuration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.hardware.camera2.params.TonemapCurve;
import android.hardware.camera2.utils.TypeReference;
import android.location.Location;
import android.location.LocationManager;
import android.os.Parcel;
import android.os.Parcelable;
import android.os.ServiceSpecificException;
import android.util.Log;
import android.util.Size;

import com.android.internal.util.Preconditions;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

/**
 * Implementation of camera metadata marshal/unmarshal across Binder to
 * the camera service
 */
public class CameraMetadataNative implements Parcelable {

    public static class Key<T> {
        private boolean mHasTag;
        private int mTag;
        private long mVendorId = Long.MAX_VALUE;
        private final Class<T> mType;
        private final TypeReference<T> mTypeReference;
        private final String mName;
        private final String mFallbackName;
        private final int mHash;

        /**
         * @hide
         */
        public Key(String name, Class<T> type, long vendorId) {
            if (name == null) {
                throw new NullPointerException("Key needs a valid name");
            } else if (type == null) {
                throw new NullPointerException("Type needs to be non-null");
            }
            mName = name;
            mFallbackName = null;
            mType = type;
            mVendorId = vendorId;
            mTypeReference = TypeReference.createSpecializedTypeReference(type);
            mHash = mName.hashCode() ^ mTypeReference.hashCode();
        }

        /**
         * @hide
         */
        public Key(String name, String fallbackName, Class<T> type) {
            if (name == null) {
                throw new NullPointerException("Key needs a valid name");
            } else if (type == null) {
                throw new NullPointerException("Type needs to be non-null");
            }
            mName = name;
            mFallbackName = fallbackName;
            mType = type;
            mTypeReference = TypeReference.createSpecializedTypeReference(type);
            mHash = mName.hashCode() ^ mTypeReference.hashCode();
        }

        /**
         * Visible for testing only.
         *
         * <p>Use the CameraCharacteristics.Key, CaptureResult.Key, or CaptureRequest.Key
         * for application code or vendor-extended keys.</p>
         */
        public Key(String name, Class<T> type) {
            if (name == null) {
                throw new NullPointerException("Key needs a valid name");
            } else if (type == null) {
                throw new NullPointerException("Type needs to be non-null");
            }
            mName = name;
            mFallbackName = null;
            mType = type;
            mTypeReference = TypeReference.createSpecializedTypeReference(type);
            mHash = mName.hashCode() ^ mTypeReference.hashCode();
        }
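        // Illustrative sketch (not part of the original class): how a test might construct a
        // raw Key directly. The key name below is an assumption for illustration only;
        // application code should use CameraCharacteristics.Key, CaptureRequest.Key, or
        // CaptureResult.Key instead.
        //
        //     Key<Integer> aeModeKey = new Key<>("android.control.aeMode", int.class);
        //     String name = aeModeKey.getName();  // "android.control.aeMode"
        //     int tag = aeModeKey.getTag();       // resolved lazily via CameraMetadataNative.getTag()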
        /**
         * Visible for testing only.
         *
         * <p>Use the CameraCharacteristics.Key, CaptureResult.Key, or CaptureRequest.Key
         * for application code or vendor-extended keys.</p>
         */
        @SuppressWarnings("unchecked")
        public Key(String name, TypeReference<T> typeReference) {
            if (name == null) {
                throw new NullPointerException("Key needs a valid name");
            } else if (typeReference == null) {
                throw new NullPointerException("TypeReference needs to be non-null");
            }
            mName = name;
            mFallbackName = null;
            mType = (Class<T>)typeReference.getRawType();
            mTypeReference = typeReference;
            mHash = mName.hashCode() ^ mTypeReference.hashCode();
        }

        /**
         * Return a camelCase, period separated name formatted like:
         * {@code "root.section[.subsections].name"}.
         *
         * <p>Built-in keys exposed by the Android SDK are always prefixed with {@code "android."};
         * keys that are device/platform-specific are prefixed with {@code "com."}.</p>
         *
         * <p>For example, {@code CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP} would
         * have a name of {@code "android.scaler.streamConfigurationMap"}; whereas a device
         * specific key might look like {@code "com.google.nexus.data.private"}.</p>
         *
         * @return String representation of the key name
         */
        public final String getName() {
            return mName;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public final int hashCode() {
            return mHash;
        }

        /**
         * Compare this key against other native keys, request keys, result keys, and
         * characteristics keys.
         *
         * <p>Two keys are considered equal if their name and type reference are equal.</p>
         *
         * <p>Note that the equality against non-native keys is one-way. A native key may be equal
         * to a result key; but that same result key will not be equal to a native key.</p>
         */
        @SuppressWarnings("rawtypes")
        @Override
        public final boolean equals(Object o) {
            if (this == o) {
                return true;
            }

            if (o == null || this.hashCode() != o.hashCode()) {
                return false;
            }

            Key<?> lhs;

            if (o instanceof CaptureResult.Key) {
                lhs = ((CaptureResult.Key)o).getNativeKey();
            } else if (o instanceof CaptureRequest.Key) {
                lhs = ((CaptureRequest.Key)o).getNativeKey();
            } else if (o instanceof CameraCharacteristics.Key) {
                lhs = ((CameraCharacteristics.Key)o).getNativeKey();
            } else if ((o instanceof Key)) {
                lhs = (Key<?>)o;
            } else {
                return false;
            }

            return mName.equals(lhs.mName) && mTypeReference.equals(lhs.mTypeReference);
        }

        /**
         * <p>
         * Get the tag corresponding to this key. This enables insertion into the
         * native metadata.
         * </p>
         *
         * <p>This value is looked up the first time, and cached subsequently.</p>
         *
         * @return The tag numeric value corresponding to the string
         */
        @UnsupportedAppUsage
        public final int getTag() {
            if (!mHasTag) {
                mTag = CameraMetadataNative.getTag(mName, mVendorId);
                mHasTag = true;
            }
            return mTag;
        }
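        // Illustrative sketch (not part of the original class) of the one-way equality contract
        // documented on equals() above; the public key classes only compare against their own
        // type, so the relation does not hold in the other direction:
        //
        //     Key<Integer> nativeKey = CaptureResult.SENSOR_SENSITIVITY.getNativeKey();
        //     nativeKey.equals(CaptureResult.SENSOR_SENSITIVITY);  // true
        //     CaptureResult.SENSOR_SENSITIVITY.equals(nativeKey);  // false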
        /**
         * Get the raw class backing the type {@code T} for this key.
         *
         * <p>The distinction is only important if {@code T} is a generic, e.g.
         * {@code Range<Integer>} since the nested type will be erased.</p>
         */
        public final Class<T> getType() {
            // TODO: remove this; other places should use #getTypeReference() instead
            return mType;
        }

        /**
         * Get the vendor tag provider id.
         *
         * @hide
         */
        public final long getVendorId() {
            return mVendorId;
        }

        /**
         * Get the type reference backing the type {@code T} for this key.
         *
         * <p>The distinction is only important if {@code T} is a generic, e.g.
         * {@code Range<Integer>} since the nested type will be retained.</p>
         */
        public final TypeReference<T> getTypeReference() {
            return mTypeReference;
        }
    }

    private static final String TAG = "CameraMetadataJV";
    private static final boolean DEBUG = false;

    // this should be in sync with HAL_PIXEL_FORMAT_BLOB defined in graphics.h
    public static final int NATIVE_JPEG_FORMAT = 0x21;

    private static final String CELLID_PROCESS = "CELLID";
    private static final String GPS_PROCESS = "GPS";
    private static final int FACE_LANDMARK_SIZE = 6;

    private static String translateLocationProviderToProcess(final String provider) {
        if (provider == null) {
            return null;
        }
        switch(provider) {
            case LocationManager.GPS_PROVIDER:
                return GPS_PROCESS;
            case LocationManager.NETWORK_PROVIDER:
                return CELLID_PROCESS;
            default:
                return null;
        }
    }

    private static String translateProcessToLocationProvider(final String process) {
        if (process == null) {
            return null;
        }
        switch(process) {
            case GPS_PROCESS:
                return LocationManager.GPS_PROVIDER;
            case CELLID_PROCESS:
                return LocationManager.NETWORK_PROVIDER;
            default:
                return null;
        }
    }

    public CameraMetadataNative() {
        super();
        mMetadataPtr = nativeAllocate();
        if (mMetadataPtr == 0) {
            throw new OutOfMemoryError("Failed to allocate native CameraMetadata");
        }
    }

    /**
     * Copy constructor - clone metadata
     */
    public CameraMetadataNative(CameraMetadataNative other) {
        super();
        mMetadataPtr = nativeAllocateCopy(other);
        if (mMetadataPtr == 0) {
            throw new OutOfMemoryError("Failed to allocate native CameraMetadata");
        }
    }

    /**
     * <p>Move the contents from {@code other} into a new camera metadata instance.</p>
     *
     * <p>After this call, {@code other} will become empty.</p>
     *
     * @param other the previous metadata instance which will get pilfered
     * @return a new metadata instance with the values from {@code other} moved into it
     */
    public static CameraMetadataNative move(CameraMetadataNative other) {
        CameraMetadataNative newObject = new CameraMetadataNative();
        newObject.swap(other);
        return newObject;
    }

    public static final @android.annotation.NonNull Parcelable.Creator<CameraMetadataNative> CREATOR =
            new Parcelable.Creator<CameraMetadataNative>() {
        @Override
        public CameraMetadataNative createFromParcel(Parcel in) {
            CameraMetadataNative metadata = new CameraMetadataNative();
            metadata.readFromParcel(in);
            return metadata;
        }

        @Override
        public CameraMetadataNative[] newArray(int size) {
            return new CameraMetadataNative[size];
        }
    };
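    // Illustrative sketch (not part of the original class): how the CREATOR above pairs with
    // writeToParcel() to move metadata across Binder. The local names are assumptions for
    // illustration only.
    //
    //     Parcel p = Parcel.obtain();
    //     metadata.writeToParcel(p, /*flags*/ 0);   // backed by nativeWriteToParcel()
    //     p.setDataPosition(0);
    //     CameraMetadataNative copy = CameraMetadataNative.CREATOR.createFromParcel(p);
    //     p.recycle();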
    @Override
    public int describeContents() {
        return 0;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        nativeWriteToParcel(dest);
    }

    /**
     * @hide
     */
    public <T> T get(CameraCharacteristics.Key<T> key) {
        return get(key.getNativeKey());
    }

    /**
     * @hide
     */
    public <T> T get(CaptureResult.Key<T> key) {
        return get(key.getNativeKey());
    }

    /**
     * @hide
     */
    public <T> T get(CaptureRequest.Key<T> key) {
        return get(key.getNativeKey());
    }

    /**
     * Look-up a metadata field value by its key.
     *
     * @param key a non-{@code null} key instance
     * @return the field corresponding to the {@code key}, or {@code null} if no value was set
     */
    public <T> T get(Key<T> key) {
        Preconditions.checkNotNull(key, "key must not be null");

        // Check if key has been overridden to use a wrapper class on the java side.
        GetCommand g = sGetCommandMap.get(key);
        if (g != null) {
            return g.getValue(this, key);
        }
        return getBase(key);
    }

    public void readFromParcel(Parcel in) {
        nativeReadFromParcel(in);
    }

    /**
     * Set the global client-side vendor tag descriptor to allow use of vendor
     * tags in camera applications.
     *
     * @throws ServiceSpecificException
     * @hide
     */
    public static void setupGlobalVendorTagDescriptor() throws ServiceSpecificException {
        int err = nativeSetupGlobalVendorTagDescriptor();
        if (err != 0) {
            throw new ServiceSpecificException(err, "Failure to set up global vendor tags");
        }
    }

    /**
     * Set the global client-side vendor tag descriptor to allow use of vendor
     * tags in camera applications.
     *
     * @return int An error code corresponding to one of the
     * {@link ICameraService} error constants, or 0 on success.
     */
    private static native int nativeSetupGlobalVendorTagDescriptor();

    /**
     * Set a camera metadata field to a value. The field definitions can be
     * found in {@link CameraCharacteristics}, {@link CaptureResult}, and
     * {@link CaptureRequest}.
     *
     * @param key The metadata field to write.
     * @param value The value to set the field to, which must be of a matching
     * type to the key.
     */
    public <T> void set(Key<T> key, T value) {
        SetCommand s = sSetCommandMap.get(key);
        if (s != null) {
            s.setValue(this, value);
            return;
        }

        setBase(key, value);
    }

    public <T> void set(CaptureRequest.Key<T> key, T value) {
        set(key.getNativeKey(), value);
    }

    public <T> void set(CaptureResult.Key<T> key, T value) {
        set(key.getNativeKey(), value);
    }

    public <T> void set(CameraCharacteristics.Key<T> key, T value) {
        set(key.getNativeKey(), value);
    }

    // Keep up-to-date with camera_metadata.h
    /**
     * @hide
     */
    public static final int TYPE_BYTE = 0;
    /**
     * @hide
     */
    public static final int TYPE_INT32 = 1;
    /**
     * @hide
     */
    public static final int TYPE_FLOAT = 2;
    /**
     * @hide
     */
    public static final int TYPE_INT64 = 3;
    /**
     * @hide
     */
    public static final int TYPE_DOUBLE = 4;
    /**
     * @hide
     */
    public static final int TYPE_RATIONAL = 5;
    /**
     * @hide
     */
    public static final int NUM_TYPES = 6;

    private void close() {
        // this sets mMetadataPtr to 0
        nativeClose();
        mMetadataPtr = 0; // set it to 0 again to prevent eclipse from making this field final
    }

    private <T> T getBase(CameraCharacteristics.Key<T> key) {
        return getBase(key.getNativeKey());
    }

    private <T> T getBase(CaptureResult.Key<T> key) {
        return getBase(key.getNativeKey());
    }

    private <T> T getBase(CaptureRequest.Key<T> key) {
        return getBase(key.getNativeKey());
    }

    private <T> T getBase(Key<T> key) {
        int tag = nativeGetTagFromKeyLocal(key.getName());
        byte[] values = readValues(tag);
        if (values == null) {
            // If the key returns null, use the fallback key if one exists.
            // This is to support old key names for the newly published keys.
            if (key.mFallbackName == null) {
                return null;
            }
            tag = nativeGetTagFromKeyLocal(key.mFallbackName);
            values = readValues(tag);
            if (values == null) {
                return null;
            }
        }

        int nativeType = nativeGetTypeFromTagLocal(tag);
        Marshaler<T> marshaler = getMarshalerForKey(key, nativeType);
        ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder());
        return marshaler.unmarshal(buffer);
    }
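    // Illustrative sketch (not part of the original class) of the two get() paths: keys
    // registered in sGetCommandMap below are synthesized from other entries, while everything
    // else is unmarshaled directly from the native byte blob by getBase(). The variable names
    // are assumptions for illustration only.
    //
    //     Face[] faces = metadata.get(CaptureResult.STATISTICS_FACES);    // GetCommand override
    //     Integer aeMode = metadata.get(CaptureRequest.CONTROL_AE_MODE);  // plain getBase() path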
    // Use Command pattern here to avoid lots of expensive if/equals checks in get for overridden
    // metadata.
    private static final HashMap<Key<?>, GetCommand> sGetCommandMap =
            new HashMap<Key<?>, GetCommand>();
    static {
        sGetCommandMap.put(
                CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getAvailableFormats();
                    }
                });
        sGetCommandMap.put(
                CaptureResult.STATISTICS_FACES.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getFaces();
                    }
                });
        sGetCommandMap.put(
                CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getFaceRectangles();
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getStreamConfigurationMap();
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.SCALER_MANDATORY_STREAM_COMBINATIONS.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMandatoryStreamCombinations();
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.CONTROL_MAX_REGIONS_AE.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMaxRegions(key);
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.CONTROL_MAX_REGIONS_AWB.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMaxRegions(key);
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.CONTROL_MAX_REGIONS_AF.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMaxRegions(key);
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMaxNumOutputs(key);
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMaxNumOutputs(key);
                    }
                });
        sGetCommandMap.put(
                CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getMaxNumOutputs(key);
                    }
                });
        sGetCommandMap.put(
                CaptureRequest.TONEMAP_CURVE.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getTonemapCurve();
                    }
                });
        sGetCommandMap.put(
                CaptureResult.JPEG_GPS_LOCATION.getNativeKey(), new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getGpsLocation();
                    }
                });
        sGetCommandMap.put(
                CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getLensShadingMap();
                    }
                });
        sGetCommandMap.put(
                CaptureResult.STATISTICS_OIS_SAMPLES.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getOisSamples();
                    }
                });
    }
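    // Illustrative note (not part of the original class): the JPEG pixel format constant differs
    // between the native and managed sides, so getAvailableFormats()/setAvailableFormats() below
    // translate it in both directions:
    //
    //     NATIVE_JPEG_FORMAT (HAL_PIXEL_FORMAT_BLOB) == 0x21
    //     ImageFormat.JPEG                           == 0x100
    //     native -> managed : 0x21 becomes ImageFormat.JPEG
    //     managed -> native : ImageFormat.JPEG becomes 0x21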
    private int[] getAvailableFormats() {
        int[] availableFormats = getBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS);
        if (availableFormats != null) {
            for (int i = 0; i < availableFormats.length; i++) {
                // JPEG has a different value between the native and managed sides; override it.
                if (availableFormats[i] == NATIVE_JPEG_FORMAT) {
                    availableFormats[i] = ImageFormat.JPEG;
                }
            }
        }

        return availableFormats;
    }

    private boolean setFaces(Face[] faces) {
        if (faces == null) {
            return false;
        }

        int numFaces = faces.length;

        // Detect if all faces are SIMPLE or not; count # of valid faces
        boolean fullMode = true;
        for (Face face : faces) {
            if (face == null) {
                numFaces--;
                Log.w(TAG, "setFaces - null face detected, skipping");
                continue;
            }

            if (face.getId() == Face.ID_UNSUPPORTED) {
                fullMode = false;
            }
        }

        Rect[] faceRectangles = new Rect[numFaces];
        byte[] faceScores = new byte[numFaces];
        int[] faceIds = null;
        int[] faceLandmarks = null;

        if (fullMode) {
            faceIds = new int[numFaces];
            faceLandmarks = new int[numFaces * FACE_LANDMARK_SIZE];
        }

        int i = 0;
        for (Face face : faces) {
            if (face == null) {
                continue;
            }

            faceRectangles[i] = face.getBounds();
            faceScores[i] = (byte)face.getScore();

            if (fullMode) {
                faceIds[i] = face.getId();

                int j = 0;

                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().x;
                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().y;
                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().x;
                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().y;
                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().x;
                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().y;
            }

            i++;
        }

        set(CaptureResult.STATISTICS_FACE_RECTANGLES, faceRectangles);
        set(CaptureResult.STATISTICS_FACE_IDS, faceIds);
        set(CaptureResult.STATISTICS_FACE_LANDMARKS, faceLandmarks);
        set(CaptureResult.STATISTICS_FACE_SCORES, faceScores);

        return true;
    }

    private Face[] getFaces() {
        Integer faceDetectMode = get(CaptureResult.STATISTICS_FACE_DETECT_MODE);
        byte[] faceScores = get(CaptureResult.STATISTICS_FACE_SCORES);
        Rect[] faceRectangles = get(CaptureResult.STATISTICS_FACE_RECTANGLES);
        int[] faceIds = get(CaptureResult.STATISTICS_FACE_IDS);
        int[] faceLandmarks = get(CaptureResult.STATISTICS_FACE_LANDMARKS);

        if (areValuesAllNull(faceDetectMode, faceScores, faceRectangles, faceIds, faceLandmarks)) {
            return null;
        }

        if (faceDetectMode == null) {
            Log.w(TAG, "Face detect mode metadata is null, assuming the mode is SIMPLE");
            faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE;
        } else if (faceDetectMode > CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
            // Face detect mode is larger than FULL, assuming the mode is FULL
            faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL;
        } else {
            if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) {
                return new Face[0];
            }
            if (faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE &&
                    faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
                Log.w(TAG, "Unknown face detect mode: " + faceDetectMode);
                return new Face[0];
            }
        }

        // Face scores and rectangles are required by SIMPLE and FULL mode.
        if (faceScores == null || faceRectangles == null) {
            Log.w(TAG, "Expect face scores and rectangles to be non-null");
            return new Face[0];
        } else if (faceScores.length != faceRectangles.length) {
            Log.w(TAG, String.format("Face score size(%d) doesn't match face rectangle size(%d)!",
                    faceScores.length, faceRectangles.length));
        }

        // To be safe, make the number of faces the minimum of all face info metadata lengths.
        int numFaces = Math.min(faceScores.length, faceRectangles.length);
        // Face id and landmarks are only required by FULL mode.
        if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
            if (faceIds == null || faceLandmarks == null) {
                Log.w(TAG, "Expect face ids and landmarks to be non-null for FULL mode, " +
                        "falling back to SIMPLE mode");
                faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE;
            } else {
                if (faceIds.length != numFaces ||
                        faceLandmarks.length != numFaces * FACE_LANDMARK_SIZE) {
                    Log.w(TAG, String.format("Face id size(%d), or face landmark size(%d) don't " +
                            "match face number(%d)!",
                            faceIds.length, faceLandmarks.length * FACE_LANDMARK_SIZE, numFaces));
                }
                // To be safe, make the number of faces the minimum of all face info metadata
                // lengths.
                numFaces = Math.min(numFaces, faceIds.length);
                numFaces = Math.min(numFaces, faceLandmarks.length / FACE_LANDMARK_SIZE);
            }
        }

        ArrayList<Face> faceList = new ArrayList<Face>();
        if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) {
            for (int i = 0; i < numFaces; i++) {
                if (faceScores[i] <= Face.SCORE_MAX &&
                        faceScores[i] >= Face.SCORE_MIN) {
                    faceList.add(new Face(faceRectangles[i], faceScores[i]));
                }
            }
        } else {
            // CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL
            for (int i = 0; i < numFaces; i++) {
                if (faceScores[i] <= Face.SCORE_MAX &&
                        faceScores[i] >= Face.SCORE_MIN &&
                        faceIds[i] >= 0) {
                    Point leftEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE],
                            faceLandmarks[i*FACE_LANDMARK_SIZE+1]);
                    Point rightEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+2],
                            faceLandmarks[i*FACE_LANDMARK_SIZE+3]);
                    Point mouth = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+4],
                            faceLandmarks[i*FACE_LANDMARK_SIZE+5]);
                    Face face = new Face(faceRectangles[i], faceScores[i], faceIds[i],
                            leftEye, rightEye, mouth);
                    faceList.add(face);
                }
            }
        }
        Face[] faces = new Face[faceList.size()];
        faceList.toArray(faces);
        return faces;
    }
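    // Illustrative note (not part of the original class): each face contributes
    // FACE_LANDMARK_SIZE (6) ints to STATISTICS_FACE_LANDMARKS, packed as
    //
    //     [leftEye.x, leftEye.y, rightEye.x, rightEye.y, mouth.x, mouth.y]
    //
    // so face i's landmarks start at index i * FACE_LANDMARK_SIZE, which is how
    // setFaces()/getFaces() above index into the array.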
    // Face rectangles are defined as (left, top, right, bottom) instead of
    // (left, top, width, height) at the native level, so the normal Rect
    // conversion that does (l, t, w, h) -> (l, t, r, b) is unnecessary. Undo
    // that conversion here for just the faces.
    private Rect[] getFaceRectangles() {
        Rect[] faceRectangles = getBase(CaptureResult.STATISTICS_FACE_RECTANGLES);
        if (faceRectangles == null) return null;

        Rect[] fixedFaceRectangles = new Rect[faceRectangles.length];
        for (int i = 0; i < faceRectangles.length; i++) {
            fixedFaceRectangles[i] = new Rect(
                    faceRectangles[i].left,
                    faceRectangles[i].top,
                    faceRectangles[i].right - faceRectangles[i].left,
                    faceRectangles[i].bottom - faceRectangles[i].top);
        }
        return fixedFaceRectangles;
    }

    private LensShadingMap getLensShadingMap() {
        float[] lsmArray = getBase(CaptureResult.STATISTICS_LENS_SHADING_MAP);
        Size s = get(CameraCharacteristics.LENS_INFO_SHADING_MAP_SIZE);

        // Do not warn if lsmArray is null while s is not. This is valid.
        if (lsmArray == null) {
            return null;
        }

        if (s == null) {
            Log.w(TAG, "getLensShadingMap - Lens shading map size was null.");
            return null;
        }

        LensShadingMap map = new LensShadingMap(lsmArray, s.getHeight(), s.getWidth());
        return map;
    }

    private Location getGpsLocation() {
        String processingMethod = get(CaptureResult.JPEG_GPS_PROCESSING_METHOD);
        double[] coords = get(CaptureResult.JPEG_GPS_COORDINATES);
        Long timeStamp = get(CaptureResult.JPEG_GPS_TIMESTAMP);

        if (areValuesAllNull(processingMethod, coords, timeStamp)) {
            return null;
        }

        Location l = new Location(translateProcessToLocationProvider(processingMethod));
        if (timeStamp != null) {
            // Location expects timestamp in [ms.]
            l.setTime(timeStamp * 1000);
        } else {
            Log.w(TAG, "getGpsLocation - No timestamp for GPS location.");
        }

        if (coords != null) {
            l.setLatitude(coords[0]);
            l.setLongitude(coords[1]);
            l.setAltitude(coords[2]);
        } else {
            Log.w(TAG, "getGpsLocation - No coordinates for GPS location");
        }

        return l;
    }
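    // Illustrative note (not part of the original class): JPEG_GPS_TIMESTAMP is stored in
    // seconds while android.location.Location uses milliseconds, so the two accessors convert
    // in opposite directions:
    //
    //     getGpsLocation(): location.setTime(jpegGpsTimestampSeconds * 1000);
    //     setGpsLocation(): jpegGpsTimestampSeconds = location.getTime() / 1000;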
    private boolean setGpsLocation(Location l) {
        if (l == null) {
            return false;
        }

        double[] coords = { l.getLatitude(), l.getLongitude(), l.getAltitude() };
        String processMethod = translateLocationProviderToProcess(l.getProvider());
        // JPEG_GPS_TIMESTAMP expects sec. instead of msec.
        long timestamp = l.getTime() / 1000;

        set(CaptureRequest.JPEG_GPS_TIMESTAMP, timestamp);
        set(CaptureRequest.JPEG_GPS_COORDINATES, coords);

        if (processMethod == null) {
            Log.w(TAG, "setGpsLocation - No process method, Location is not from a GPS or " +
                    "NETWORK provider");
        } else {
            setBase(CaptureRequest.JPEG_GPS_PROCESSING_METHOD, processMethod);
        }
        return true;
    }

    private void parseRecommendedConfigurations(RecommendedStreamConfiguration[] configurations,
            StreamConfigurationMap fullMap, boolean isDepth,
            ArrayList<ArrayList<StreamConfiguration>> /*out*/streamConfigList,
            ArrayList<ArrayList<StreamConfigurationDuration>> /*out*/streamDurationList,
            ArrayList<ArrayList<StreamConfigurationDuration>> /*out*/streamStallList,
            boolean[] /*out*/supportsPrivate) {

        streamConfigList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT);
        streamDurationList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT);
        streamStallList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT);
        for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) {
            streamConfigList.add(new ArrayList<StreamConfiguration> ());
            streamDurationList.add(new ArrayList<StreamConfigurationDuration> ());
            streamStallList.add(new ArrayList<StreamConfigurationDuration> ());
        }

        for (RecommendedStreamConfiguration c : configurations) {
            int width = c.getWidth();
            int height = c.getHeight();
            int internalFormat = c.getFormat();
            int publicFormat = (isDepth) ?
                    StreamConfigurationMap.depthFormatToPublic(internalFormat) :
                    StreamConfigurationMap.imageFormatToPublic(internalFormat);
            Size sz = new Size(width, height);
            int usecaseBitmap = c.getUsecaseBitmap();

            if (!c.isInput()) {
                StreamConfigurationDuration minDurationConfiguration = null;
                StreamConfigurationDuration stallDurationConfiguration = null;

                StreamConfiguration streamConfiguration = new StreamConfiguration(internalFormat,
                        width, height, /*input*/ false);

                long minFrameDuration = fullMap.getOutputMinFrameDuration(publicFormat, sz);
                if (minFrameDuration > 0) {
                    minDurationConfiguration = new StreamConfigurationDuration(internalFormat,
                            width, height, minFrameDuration);
                }

                long stallDuration = fullMap.getOutputStallDuration(publicFormat, sz);
                if (stallDuration > 0) {
                    stallDurationConfiguration = new StreamConfigurationDuration(internalFormat,
                            width, height, stallDuration);
                }

                for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) {
                    if ((usecaseBitmap & (1 << i)) != 0) {
                        ArrayList<StreamConfiguration> sc = streamConfigList.get(i);
                        sc.add(streamConfiguration);

                        if (minFrameDuration > 0) {
                            ArrayList<StreamConfigurationDuration> scd = streamDurationList.get(i);
                            scd.add(minDurationConfiguration);
                        }

                        if (stallDuration > 0) {
                            ArrayList<StreamConfigurationDuration> scs = streamStallList.get(i);
                            scs.add(stallDurationConfiguration);
                        }

                        if ((supportsPrivate != null) && !supportsPrivate[i] &&
                                (publicFormat == ImageFormat.PRIVATE)) {
                            supportsPrivate[i] = true;
                        }
                    }
                }
            } else {
                if (usecaseBitmap != (1 << RecommendedStreamConfigurationMap.USECASE_ZSL)) {
                    throw new IllegalArgumentException("Recommended input stream configurations " +
                            "should only be advertised in the ZSL use case!");
                }

                ArrayList<StreamConfiguration> sc = streamConfigList.get(
                        RecommendedStreamConfigurationMap.USECASE_ZSL);
                sc.add(new StreamConfiguration(internalFormat,
                        width, height, /*input*/ true));
            }
        }
    }

    private class StreamConfigurationData {
        StreamConfiguration [] streamConfigurationArray = null;
        StreamConfigurationDuration [] minDurationArray = null;
        StreamConfigurationDuration [] stallDurationArray = null;
    }

    public void initializeStreamConfigurationData(ArrayList<StreamConfiguration> sc,
            ArrayList<StreamConfigurationDuration> scd, ArrayList<StreamConfigurationDuration> scs,
            StreamConfigurationData /*out*/scData) {
        if ((scData == null) || (sc == null)) {
            return;
        }

        scData.streamConfigurationArray = new StreamConfiguration[sc.size()];
        scData.streamConfigurationArray = sc.toArray(scData.streamConfigurationArray);

        if ((scd != null) && !scd.isEmpty()) {
            scData.minDurationArray = new StreamConfigurationDuration[scd.size()];
            scData.minDurationArray = scd.toArray(scData.minDurationArray);
        } else {
            scData.minDurationArray = new StreamConfigurationDuration[0];
        }

        if ((scs != null) && !scs.isEmpty()) {
            scData.stallDurationArray = new StreamConfigurationDuration[scs.size()];
            scData.stallDurationArray = scs.toArray(scData.stallDurationArray);
        } else {
            scData.stallDurationArray = new StreamConfigurationDuration[0];
        }
    }

    /**
     * Retrieve the list of recommended stream configurations.
     *
     * @return A list of recommended stream configuration maps for each common use case or null
     * in case the recommended stream configurations are invalid or incomplete.
     * @hide
     */
    public ArrayList<RecommendedStreamConfigurationMap> getRecommendedStreamConfigurations() {
        RecommendedStreamConfiguration[] configurations = getBase(
                CameraCharacteristics.SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS);
        RecommendedStreamConfiguration[] depthConfigurations = getBase(
                CameraCharacteristics.DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS);
        if ((configurations == null) && (depthConfigurations == null)) {
            return null;
        }

        StreamConfigurationMap fullMap = getStreamConfigurationMap();
        ArrayList<RecommendedStreamConfigurationMap> recommendedConfigurations =
                new ArrayList<RecommendedStreamConfigurationMap> ();

        ArrayList<ArrayList<StreamConfiguration>> streamConfigList =
                new ArrayList<ArrayList<StreamConfiguration>>();
        ArrayList<ArrayList<StreamConfigurationDuration>> streamDurationList =
                new ArrayList<ArrayList<StreamConfigurationDuration>>();
        ArrayList<ArrayList<StreamConfigurationDuration>> streamStallList =
                new ArrayList<ArrayList<StreamConfigurationDuration>>();
        boolean[] supportsPrivate =
                new boolean[RecommendedStreamConfigurationMap.MAX_USECASE_COUNT];
        try {
            if (configurations != null) {
                parseRecommendedConfigurations(configurations, fullMap, /*isDepth*/ false,
                        streamConfigList, streamDurationList, streamStallList, supportsPrivate);
            }
        } catch (IllegalArgumentException e) {
            Log.e(TAG, "Failed parsing the recommended stream configurations!");
            return null;
        }

        ArrayList<ArrayList<StreamConfiguration>> depthStreamConfigList =
                new ArrayList<ArrayList<StreamConfiguration>>();
        ArrayList<ArrayList<StreamConfigurationDuration>> depthStreamDurationList =
                new ArrayList<ArrayList<StreamConfigurationDuration>>();
        ArrayList<ArrayList<StreamConfigurationDuration>> depthStreamStallList =
                new ArrayList<ArrayList<StreamConfigurationDuration>>();
        if (depthConfigurations != null) {
            try {
                parseRecommendedConfigurations(depthConfigurations, fullMap, /*isDepth*/ true,
                        depthStreamConfigList, depthStreamDurationList, depthStreamStallList,
                        /*supportsPrivate*/ null);
            } catch (IllegalArgumentException e) {
                Log.e(TAG, "Failed parsing the recommended depth stream configurations!");
                return null;
            }
        }

        ReprocessFormatsMap inputOutputFormatsMap = getBase(
                CameraCharacteristics.SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP);
        HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase(
                CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS);
        boolean listHighResolution = isBurstSupported();
        recommendedConfigurations.ensureCapacity(
                RecommendedStreamConfigurationMap.MAX_USECASE_COUNT);
        for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) {
            StreamConfigurationData scData = new StreamConfigurationData();
            if (configurations != null) {
                initializeStreamConfigurationData(streamConfigList.get(i),
                        streamDurationList.get(i), streamStallList.get(i), scData);
            }

            StreamConfigurationData depthScData = new StreamConfigurationData();
            if (depthConfigurations != null) {
                initializeStreamConfigurationData(depthStreamConfigList.get(i),
                        depthStreamDurationList.get(i), depthStreamStallList.get(i), depthScData);
            }

            if ((scData.streamConfigurationArray == null ||
                    scData.streamConfigurationArray.length == 0) &&
                    (depthScData.streamConfigurationArray == null ||
                    depthScData.streamConfigurationArray.length == 0)) {
                recommendedConfigurations.add(null);
                continue;
            }

            // Dynamic depth streams involve a lot of SW processing and currently cannot be
            // recommended.
            StreamConfigurationMap map = null;
            switch (i) {
                case RecommendedStreamConfigurationMap.USECASE_PREVIEW:
                case RecommendedStreamConfigurationMap.USECASE_RAW:
                case RecommendedStreamConfigurationMap.USECASE_LOW_LATENCY_SNAPSHOT:
                case RecommendedStreamConfigurationMap.USECASE_VIDEO_SNAPSHOT:
                    map = new StreamConfigurationMap(scData.streamConfigurationArray,
                            scData.minDurationArray, scData.stallDurationArray,
                            /*depthconfiguration*/ null, /*depthminduration*/ null,
                            /*depthstallduration*/ null,
                            /*dynamicDepthConfigurations*/ null,
                            /*dynamicDepthMinFrameDurations*/ null,
                            /*dynamicDepthStallDurations*/ null,
                            /*heicconfiguration*/ null,
                            /*heicminduration*/ null,
                            /*heicstallduration*/ null,
                            /*highspeedvideoconfigurations*/ null,
                            /*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]);
                    break;
                case RecommendedStreamConfigurationMap.USECASE_RECORD:
                    map = new StreamConfigurationMap(scData.streamConfigurationArray,
                            scData.minDurationArray, scData.stallDurationArray,
                            /*depthconfiguration*/ null, /*depthminduration*/ null,
                            /*depthstallduration*/ null,
                            /*dynamicDepthConfigurations*/ null,
                            /*dynamicDepthMinFrameDurations*/ null,
                            /*dynamicDepthStallDurations*/ null,
                            /*heicconfiguration*/ null,
                            /*heicminduration*/ null,
                            /*heicstallduration*/ null,
                            highSpeedVideoConfigurations,
                            /*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]);
                    break;
                case RecommendedStreamConfigurationMap.USECASE_ZSL:
                    map = new StreamConfigurationMap(scData.streamConfigurationArray,
                            scData.minDurationArray, scData.stallDurationArray,
                            depthScData.streamConfigurationArray, depthScData.minDurationArray,
                            depthScData.stallDurationArray,
                            /*dynamicDepthConfigurations*/ null,
                            /*dynamicDepthMinFrameDurations*/ null,
                            /*dynamicDepthStallDurations*/ null,
                            /*heicconfiguration*/ null,
                            /*heicminduration*/ null,
                            /*heicstallduration*/ null,
                            /*highSpeedVideoConfigurations*/ null,
                            inputOutputFormatsMap, listHighResolution, supportsPrivate[i]);
                    break;
                default:
                    map = new StreamConfigurationMap(scData.streamConfigurationArray,
                            scData.minDurationArray, scData.stallDurationArray,
                            depthScData.streamConfigurationArray, depthScData.minDurationArray,
                            depthScData.stallDurationArray,
                            /*dynamicDepthConfigurations*/ null,
                            /*dynamicDepthMinFrameDurations*/ null,
                            /*dynamicDepthStallDurations*/ null,
                            /*heicconfiguration*/ null,
                            /*heicminduration*/ null,
                            /*heicstallduration*/ null,
                            /*highSpeedVideoConfigurations*/ null,
                            /*inputOutputFormatsMap*/ null, listHighResolution, supportsPrivate[i]);
            }

            recommendedConfigurations.add(new RecommendedStreamConfigurationMap(map, /*usecase*/i,
                    supportsPrivate[i]));
        }

        return recommendedConfigurations;
    }
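    // Illustrative sketch (not part of the original class): applications consume the result of
    // getRecommendedStreamConfigurations() through the public CameraCharacteristics API. The
    // variable names are assumptions for illustration only.
    //
    //     RecommendedStreamConfigurationMap previewConfigs =
    //             characteristics.getRecommendedStreamConfigurationMap(
    //                     RecommendedStreamConfigurationMap.USECASE_PREVIEW);
    //     // may be null when no recommendation is available for the use case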
    private boolean isBurstSupported() {
        boolean ret = false;

        int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
        for (int capability : capabilities) {
            if (capability == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE) {
                ret = true;
                break;
            }
        }

        return ret;
    }

    private MandatoryStreamCombination[] getMandatoryStreamCombinations() {
        int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
        ArrayList<Integer> caps = new ArrayList<Integer>();
        caps.ensureCapacity(capabilities.length);
        for (int c : capabilities) {
            caps.add(new Integer(c));
        }
        int hwLevel = getBase(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
        MandatoryStreamCombination.Builder build = new MandatoryStreamCombination.Builder(
                mCameraId, hwLevel, mDisplaySize, caps, getStreamConfigurationMap());
        List<MandatoryStreamCombination> combs = build.getAvailableMandatoryStreamCombinations();
        if ((combs != null) && (!combs.isEmpty())) {
            MandatoryStreamCombination[] combArray = new MandatoryStreamCombination[combs.size()];
            combArray = combs.toArray(combArray);
            return combArray;
        }

        return null;
    }

    private StreamConfigurationMap getStreamConfigurationMap() {
        StreamConfiguration[] configurations = getBase(
                CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
        StreamConfigurationDuration[] minFrameDurations = getBase(
                CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
        StreamConfigurationDuration[] stallDurations = getBase(
                CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS);
        StreamConfiguration[] depthConfigurations = getBase(
                CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS);
        StreamConfigurationDuration[] depthMinFrameDurations = getBase(
                CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS);
        StreamConfigurationDuration[] depthStallDurations = getBase(
                CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS);
        StreamConfiguration[] dynamicDepthConfigurations = getBase(
                CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS);
        StreamConfigurationDuration[] dynamicDepthMinFrameDurations = getBase(
                CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS);
        StreamConfigurationDuration[] dynamicDepthStallDurations = getBase(
                CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS);
        StreamConfiguration[] heicConfigurations = getBase(
                CameraCharacteristics.HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS);
        StreamConfigurationDuration[] heicMinFrameDurations = getBase(
                CameraCharacteristics.HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS);
        StreamConfigurationDuration[] heicStallDurations = getBase(
                CameraCharacteristics.HEIC_AVAILABLE_HEIC_STALL_DURATIONS);
        HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase(
                CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS);
        ReprocessFormatsMap inputOutputFormatsMap = getBase(
                CameraCharacteristics.SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP);
        boolean listHighResolution = isBurstSupported();
        return new StreamConfigurationMap(
                configurations, minFrameDurations,
                stallDurations,
                depthConfigurations, depthMinFrameDurations, depthStallDurations,
                dynamicDepthConfigurations, dynamicDepthMinFrameDurations,
                dynamicDepthStallDurations, heicConfigurations,
                heicMinFrameDurations, heicStallDurations,
                highSpeedVideoConfigurations, inputOutputFormatsMap,
                listHighResolution);
    }

    private <T> Integer getMaxRegions(Key<T> key) {
        final int AE = 0;
        final int AWB = 1;
        final int AF = 2;

        // The order of the elements is: (AE, AWB, AF)
        int[] maxRegions = getBase(CameraCharacteristics.CONTROL_MAX_REGIONS);

        if (maxRegions == null) {
            return null;
        }

        if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AE)) {
            return maxRegions[AE];
        } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB)) {
            return maxRegions[AWB];
        } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AF)) {
            return maxRegions[AF];
        } else {
            throw new AssertionError("Invalid key " + key);
        }
    }

    private <T> Integer getMaxNumOutputs(Key<T> key) {
        final int RAW = 0;
        final int PROC = 1;
        final int PROC_STALLING = 2;

        // The order of the elements is: (raw, proc+nonstalling, proc+stalling)
        int[] maxNumOutputs = getBase(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS);

        if (maxNumOutputs == null) {
            return null;
        }

        if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW)) {
            return maxNumOutputs[RAW];
        } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC)) {
            return maxNumOutputs[PROC];
        } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING)) {
            return maxNumOutputs[PROC_STALLING];
        } else {
            throw new AssertionError("Invalid key " + key);
        }
    }

    private <T> TonemapCurve getTonemapCurve() {
        float[] red = getBase(CaptureRequest.TONEMAP_CURVE_RED);
        float[] green = getBase(CaptureRequest.TONEMAP_CURVE_GREEN);
        float[] blue = getBase(CaptureRequest.TONEMAP_CURVE_BLUE);

        if (areValuesAllNull(red, green, blue)) {
            return null;
        }

        if (red == null || green == null || blue == null) {
            Log.w(TAG, "getTonemapCurve - missing tone curve components");
            return null;
        }
        TonemapCurve tc = new TonemapCurve(red, green, blue);
        return tc;
    }

    private OisSample[] getOisSamples() {
        long[] timestamps = getBase(CaptureResult.STATISTICS_OIS_TIMESTAMPS);
        float[] xShifts = getBase(CaptureResult.STATISTICS_OIS_X_SHIFTS);
        float[] yShifts = getBase(CaptureResult.STATISTICS_OIS_Y_SHIFTS);

        if (timestamps == null) {
            if (xShifts != null) {
                throw new AssertionError("timestamps is null but xShifts is not");
            }

            if (yShifts != null) {
                throw new AssertionError("timestamps is null but yShifts is not");
            }

            return null;
        }

        if (xShifts == null) {
            throw new AssertionError("timestamps is not null but xShifts is");
        }

        if (yShifts == null) {
            throw new AssertionError("timestamps is not null but yShifts is");
        }

        if (xShifts.length != timestamps.length) {
            throw new AssertionError(String.format(
                    "timestamps has %d entries but xShifts has %d", timestamps.length,
                    xShifts.length));
        }

        if (yShifts.length != timestamps.length) {
            throw new AssertionError(String.format(
                    "timestamps has %d entries but yShifts has %d", timestamps.length,
                    yShifts.length));
        }

        OisSample[] samples = new OisSample[timestamps.length];
        for (int i = 0; i < timestamps.length; i++) {
            samples[i] = new OisSample(timestamps[i], xShifts[i], yShifts[i]);
        }
        return samples;
    }

    private <T> void setBase(CameraCharacteristics.Key<T> key, T value) {
        setBase(key.getNativeKey(), value);
    }

    private <T> void setBase(CaptureResult.Key<T> key, T value) {
        setBase(key.getNativeKey(), value);
    }

    private <T> void setBase(CaptureRequest.Key<T> key, T value) {
        setBase(key.getNativeKey(), value);
    }

    private <T> void setBase(Key<T> key, T value) {
        int tag = nativeGetTagFromKeyLocal(key.getName());
        if (value == null) {
            // Erase the entry
            writeValues(tag, /*src*/null);
            return;
        } // else update the entry to a new value

        int nativeType = nativeGetTypeFromTagLocal(tag);
        Marshaler<T> marshaler = getMarshalerForKey(key, nativeType);
        int size = marshaler.calculateMarshalSize(value);

        // TODO: Optimization. Cache the byte[] and reuse if the size is big enough.
        byte[] values = new byte[size];

        ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder());
        marshaler.marshal(value, buffer);

        writeValues(tag, values);
    }

    // Use Command pattern here to avoid lots of expensive if/equals checks in set for overridden
    // metadata.
    private static final HashMap<Key<?>, SetCommand> sSetCommandMap =
            new HashMap<Key<?>, SetCommand>();
    static {
        sSetCommandMap.put(CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey(),
                new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setAvailableFormats((int[]) value);
            }
        });
        sSetCommandMap.put(CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey(),
                new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setFaceRectangles((Rect[]) value);
            }
        });
        sSetCommandMap.put(CaptureResult.STATISTICS_FACES.getNativeKey(),
                new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setFaces((Face[])value);
            }
        });
        sSetCommandMap.put(CaptureRequest.TONEMAP_CURVE.getNativeKey(), new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setTonemapCurve((TonemapCurve) value);
            }
        });
        sSetCommandMap.put(CaptureResult.JPEG_GPS_LOCATION.getNativeKey(), new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setGpsLocation((Location) value);
            }
        });
    }
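    // Illustrative sketch (not part of the original class): keys registered in sSetCommandMap
    // above are decomposed or converted instead of being written directly. For example, setting
    // the face-rectangle key converts managed (left, top, width, height) rectangles into the
    // native (left, top, right, bottom) layout via setFaceRectangles() below:
    //
    //     metadata.set(CaptureResult.STATISTICS_FACE_RECTANGLES,
    //             new Rect[] { new Rect(/*l*/ 10, /*t*/ 20, /*w*/ 100, /*h*/ 80) });
    //     // stored natively as (10, 20, 110, 100)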
    private boolean setAvailableFormats(int[] value) {
        int[] availableFormat = value;
        if (value == null) {
            // Let setBase() handle the null value case.
            return false;
        }

        int[] newValues = new int[availableFormat.length];
        for (int i = 0; i < availableFormat.length; i++) {
            newValues[i] = availableFormat[i];
            if (availableFormat[i] == ImageFormat.JPEG) {
                newValues[i] = NATIVE_JPEG_FORMAT;
            }
        }

        setBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS, newValues);
        return true;
    }

    /**
     * Convert Face Rectangles from managed side to native side as they have different definitions.
     * <p>
     * Managed side face rectangles are defined as: left, top, width, height.
     * Native side face rectangles are defined as: left, top, right, bottom.
     * The input face rectangles need to be converted to the native side definition when set is
     * called.
     * </p>
     *
     * @param faceRects Input face rectangles.
     * @return true if face rectangles can be set successfully. Otherwise, let the caller
     *         (setBase) handle it appropriately.
     */
    private boolean setFaceRectangles(Rect[] faceRects) {
        if (faceRects == null) {
            return false;
        }

        Rect[] newFaceRects = new Rect[faceRects.length];
        for (int i = 0; i < newFaceRects.length; i++) {
            newFaceRects[i] = new Rect(
                    faceRects[i].left,
                    faceRects[i].top,
                    faceRects[i].right + faceRects[i].left,
                    faceRects[i].bottom + faceRects[i].top);
        }

        setBase(CaptureResult.STATISTICS_FACE_RECTANGLES, newFaceRects);
        return true;
    }

    private <T> boolean setTonemapCurve(TonemapCurve tc) {
        if (tc == null) {
            return false;
        }

        float[][] curve = new float[3][];
        for (int i = TonemapCurve.CHANNEL_RED; i <= TonemapCurve.CHANNEL_BLUE; i++) {
            int pointCount = tc.getPointCount(i);
            curve[i] = new float[pointCount * TonemapCurve.POINT_SIZE];
            tc.copyColorCurve(i, curve[i], 0);
        }
        setBase(CaptureRequest.TONEMAP_CURVE_RED, curve[0]);
        setBase(CaptureRequest.TONEMAP_CURVE_GREEN, curve[1]);
        setBase(CaptureRequest.TONEMAP_CURVE_BLUE, curve[2]);

        return true;
    }

    private int mCameraId = -1;
    private Size mDisplaySize = new Size(0, 0);

    /**
     * Set the current camera Id.
     *
     * @param cameraId Current camera id.
     *
     * @hide
     */
    public void setCameraId(int cameraId) {
        mCameraId = cameraId;
    }

    /**
     * Set the current display size.
     *
     * @param displaySize The current display size.
    private int mCameraId = -1;
    private Size mDisplaySize = new Size(0, 0);

    /**
     * Set the current camera Id.
     *
     * @param cameraId Current camera id.
     *
     * @hide
     */
    public void setCameraId(int cameraId) {
        mCameraId = cameraId;
    }

    /**
     * Set the current display size.
     *
     * @param displaySize The current display size.
     *
     * @hide
     */
    public void setDisplaySize(Size displaySize) {
        mDisplaySize = displaySize;
    }

    @UnsupportedAppUsage
    private long mMetadataPtr; // native CameraMetadata*

    private native long nativeAllocate();
    private native long nativeAllocateCopy(CameraMetadataNative other)
            throws NullPointerException;

    private native synchronized void nativeWriteToParcel(Parcel dest);
    private native synchronized void nativeReadFromParcel(Parcel source);
    private native synchronized void nativeSwap(CameraMetadataNative other)
            throws NullPointerException;
    private native synchronized void nativeClose();
    private native synchronized boolean nativeIsEmpty();
    private native synchronized int nativeGetEntryCount();

    @UnsupportedAppUsage
    private native synchronized byte[] nativeReadValues(int tag);
    private native synchronized void nativeWriteValues(int tag, byte[] src);
    private native synchronized void nativeDump() throws IOException; // dump to ALOGD

    private native synchronized ArrayList nativeGetAllVendorKeys(Class keyClass);
    @UnsupportedAppUsage
    private native synchronized int nativeGetTagFromKeyLocal(String keyName)
            throws IllegalArgumentException;
    @UnsupportedAppUsage
    private native synchronized int nativeGetTypeFromTagLocal(int tag)
            throws IllegalArgumentException;
    private static native int nativeGetTagFromKey(String keyName, long vendorId)
            throws IllegalArgumentException;
    private static native int nativeGetTypeFromTag(int tag, long vendorId)
            throws IllegalArgumentException;

    /**
     * <p>Perform a 0-copy swap of the internal metadata with another object.</p>
     *
     * <p>Useful to convert a CameraMetadata into e.g. a CaptureRequest.</p>
     *
     * @param other Metadata to swap with
     * @throws NullPointerException if other was null
     * @hide
     */
    public void swap(CameraMetadataNative other) {
        nativeSwap(other);
        mCameraId = other.mCameraId;
        mDisplaySize = other.mDisplaySize;
    }
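    // Editorial sketch, not part of the original class: a typical use of swap(). A caller that
    // has unmarshaled a CameraMetadataNative from Binder can move its contents into another
    // object without copying the underlying native buffer. The local names are assumptions.
    //
    //     CameraMetadataNative received = ...; // e.g. metadata read from a Parcel
    //     CameraMetadataNative target = new CameraMetadataNative();
    //     target.swap(received);
    //     // "target" now owns the native buffer; "received" holds the (empty) metadata that
    //     // "target" previously owned, and the camera id / display size are carried over.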
    /**
     * @hide
     */
    public int getEntryCount() {
        return nativeGetEntryCount();
    }

    /**
     * Does this metadata contain at least 1 entry?
     *
     * @hide
     */
    public boolean isEmpty() {
        return nativeIsEmpty();
    }

    /**
     * Return a list containing keys of the given key class for all defined vendor tags.
     *
     * @hide
     */
    public <K> ArrayList<K> getAllVendorKeys(Class<K> keyClass) {
        if (keyClass == null) {
            throw new NullPointerException();
        }
        return (ArrayList<K>) nativeGetAllVendorKeys(keyClass);
    }

    /**
     * Convert a key string into the equivalent native tag.
     *
     * @throws IllegalArgumentException if the key was not recognized
     * @throws NullPointerException if the key was null
     *
     * @hide
     */
    public static int getTag(String key) {
        return nativeGetTagFromKey(key, Long.MAX_VALUE);
    }

    /**
     * Convert a key string into the equivalent native tag.
     *
     * @throws IllegalArgumentException if the key was not recognized
     * @throws NullPointerException if the key was null
     *
     * @hide
     */
    public static int getTag(String key, long vendorId) {
        return nativeGetTagFromKey(key, vendorId);
    }

    /**
     * Get the underlying native type for a tag.
     *
     * @param tag An integer tag, see e.g. {@link #getTag}
     * @param vendorId A vendor tag provider id
     * @return An int enum for the metadata type, see e.g. {@link #TYPE_BYTE}
     *
     * @hide
     */
    public static int getNativeType(int tag, long vendorId) {
        return nativeGetTypeFromTag(tag, vendorId);
    }

    /**
     * <p>Updates the existing entry for tag with the new bytes pointed to by src, erasing
     * the entry if src was null.</p>
     *
     * <p>An empty array can be passed in to update the entry to 0 elements.</p>
     *
     * @param tag An integer tag, see e.g. {@link #getTag}
     * @param src An array of bytes, or null to erase the entry
     *
     * @hide
     */
    public void writeValues(int tag, byte[] src) {
        nativeWriteValues(tag, src);
    }

    /**
     * <p>Returns a byte[] of data corresponding to this tag. Use a wrapped ByteBuffer to
     * deserialize the data properly.</p>
     *
     * <p>An empty array can be returned to denote an existing entry with 0 elements.</p>
     *
     * @param tag An integer tag, see e.g. {@link #getTag}
     *
     * @return {@code null} if there were 0 entries for this tag, a byte[] otherwise.
     * @hide
     */
    public byte[] readValues(int tag) {
        // TODO: Optimization. Native code returns a ByteBuffer instead.
        return nativeReadValues(tag);
    }
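    // Editorial sketch, not part of the original class: the raw tag-level access used by tests
    // and by the marshaling layer. The key name below is a real metadata key; the byte contents
    // are illustrative only.
    //
    //     int tag = CameraMetadataNative.getTag("android.control.aeMode");
    //     byte[] raw = metadata.readValues(tag);      // null if the entry has never been written
    //     metadata.writeValues(tag, new byte[] {1});  // overwrite with a single-byte value
    //     metadata.writeValues(tag, null);            // erase the entry again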
    /**
     * Get the marshaler compatible with the {@code key} and type {@code T}.
     *
     * @throws UnsupportedOperationException
     *          if the native/managed type combination for {@code key} is not supported
     */
    private static <T> Marshaler<T> getMarshalerForKey(Key<T> key, int nativeType) {
        return MarshalRegistry.getMarshaler(key.getTypeReference(),
                nativeType);
    }

    @SuppressWarnings({ "unchecked", "rawtypes" })
    private static void registerAllMarshalers() {
        if (DEBUG) {
            Log.v(TAG, "Shall register metadata marshalers");
        }

        MarshalQueryable[] queryList = new MarshalQueryable[] {
                // marshalers for standard types
                new MarshalQueryablePrimitive(),
                new MarshalQueryableEnum(),
                new MarshalQueryableArray(),

                // pseudo standard types, that expand/narrow the native type into a managed type
                new MarshalQueryableBoolean(),
                new MarshalQueryableNativeByteToInteger(),

                // marshalers for custom types
                new MarshalQueryableRect(),
                new MarshalQueryableSize(),
                new MarshalQueryableSizeF(),
                new MarshalQueryableString(),
                new MarshalQueryableReprocessFormatsMap(),
                new MarshalQueryableRange(),
                new MarshalQueryablePair(),
                new MarshalQueryableMeteringRectangle(),
                new MarshalQueryableColorSpaceTransform(),
                new MarshalQueryableStreamConfiguration(),
                new MarshalQueryableStreamConfigurationDuration(),
                new MarshalQueryableRggbChannelVector(),
                new MarshalQueryableBlackLevelPattern(),
                new MarshalQueryableHighSpeedVideoConfiguration(),
                new MarshalQueryableRecommendedStreamConfiguration(),

                // generic parcelable marshaler (MUST BE LAST since it has lowest priority)
                new MarshalQueryableParcelable(),
        };

        for (MarshalQueryable query : queryList) {
            MarshalRegistry.registerMarshalQueryable(query);
        }
        if (DEBUG) {
            Log.v(TAG, "Registered metadata marshalers");
        }
    }

    /**
     * Check if input arguments are all {@code null}.
     *
     * @param objs Input arguments for null check
     * @return {@code true} if input arguments are all {@code null}, otherwise {@code false}
     */
    private static boolean areValuesAllNull(Object... objs) {
        for (Object o : objs) {
            if (o != null) return false;
        }
        return true;
    }

    static {
        registerAllMarshalers();
    }
}
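// Editorial sketch, not part of the original file: how the registry populated above is consulted
// at get/set time. getMarshalerForKey() pairs the key's managed TypeReference with the native type
// reported for the tag; MarshalRegistry then tries the registered MarshalQueryable instances in
// the order given above, which is why MarshalQueryableParcelable must stay last as the
// lowest-priority fallback. The key below is only an example; the steps are paraphrased.
//
//     // Roughly what happens inside the class when reading SCALER_CROP_REGION (a Rect):
//     //   int nativeType = nativeGetTypeFromTagLocal(tag);           // an int32 tag
//     //   Marshaler<Rect> m = getMarshalerForKey(key, nativeType);   // MarshalQueryableRect matches
//     //   Rect value = m.unmarshal(
//     //           ByteBuffer.wrap(readValues(tag)).order(ByteOrder.nativeOrder()));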