<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright (C) 2012 The Android Open Source Project

     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
     You may obtain a copy of the License at

          http://www.apache.org/licenses/LICENSE-2.0

     Unless required by applicable law or agreed to in writing, software
     distributed under the License is distributed on an "AS IS" BASIS,
     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     See the License for the specific language governing permissions and
     limitations under the License.
-->
<metadata xmlns="http://schemas.android.com/service/camera/metadata/"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata_properties.xsd">

  <tags>
    <tag id="BC">
      Needed for backwards compatibility with old Java API
    </tag>
    <tag id="V1">
      New features for first camera 2 release (API1)
    </tag>
    <tag id="RAW">
      Needed for useful RAW image processing and DNG file support
    </tag>
    <tag id="HAL2">
      Entry is only used by camera device legacy HAL 2.x
    </tag>
    <tag id="FULL">
      Entry is required for full hardware level devices, and optional for other hardware levels
    </tag>
    <tag id="DEPTH">
      Entry is required for the depth capability.
    </tag>
    <tag id="REPROC">
      Entry is required for the YUV or PRIVATE reprocessing capability.
    </tag>
    <tag id="LOGICALCAMERA">
      Entry is required for logical multi-camera capability.
    </tag>
    <tag id="FUTURE">
      Entry is under-specified and is not required for now. This is for book-keeping purposes;
      do not implement or use it, as it may be revised in the future.
    </tag>
  </tags>

  <types>
    <typedef name="pairFloatFloat">
      <language name="java">android.util.Pair&lt;Float,Float&gt;</language>
    </typedef>
    <typedef name="pairDoubleDouble">
      <language name="java">android.util.Pair&lt;Double,Double&gt;</language>
    </typedef>
    <typedef name="rectangle">
      <language name="java">android.graphics.Rect</language>
    </typedef>
    <typedef name="size">
      <language name="java">android.util.Size</language>
    </typedef>
    <typedef name="string">
      <language name="java">String</language>
    </typedef>
    <typedef name="boolean">
      <language name="java">boolean</language>
    </typedef>
    <typedef name="imageFormat">
      <language name="java">int</language>
    </typedef>
    <typedef name="streamConfigurationMap">
      <language name="java">android.hardware.camera2.params.StreamConfigurationMap</language>
    </typedef>
    <typedef name="streamConfiguration">
      <language name="java">android.hardware.camera2.params.StreamConfiguration</language>
    </typedef>
    <typedef name="streamConfigurationDuration">
      <language name="java">android.hardware.camera2.params.StreamConfigurationDuration</language>
    </typedef>
    <typedef name="face">
      <language name="java">android.hardware.camera2.params.Face</language>
    </typedef>
    <typedef name="meteringRectangle">
      <language name="java">android.hardware.camera2.params.MeteringRectangle</language>
    </typedef>
    <typedef name="rangeFloat">
      <language name="java">android.util.Range&lt;Float&gt;</language>
    </typedef>
    <typedef name="rangeInt">
      <language name="java">android.util.Range&lt;Integer&gt;</language>
    </typedef>
    <typedef name="rangeLong">
      <language name="java">android.util.Range&lt;Long&gt;</language>
    </typedef>
    <typedef name="colorSpaceTransform">
      <language name="java">android.hardware.camera2.params.ColorSpaceTransform</language>
    </typedef>
    <typedef name="rggbChannelVector">
      <language name="java">android.hardware.camera2.params.RggbChannelVector</language>
    </typedef>
    <typedef name="blackLevelPattern">
      <language name="java">android.hardware.camera2.params.BlackLevelPattern</language>
    </typedef>
    <typedef name="enumList">
      <language name="java">int</language>
    </typedef>
    <typedef name="sizeF">
      <language name="java">android.util.SizeF</language>
    </typedef>
    <typedef name="point">
      <language name="java">android.graphics.Point</language>
    </typedef>
    <typedef name="tonemapCurve">
      <language name="java">android.hardware.camera2.params.TonemapCurve</language>
    </typedef>
    <typedef name="lensShadingMap">
      <language name="java">android.hardware.camera2.params.LensShadingMap</language>
    </typedef>
    <typedef name="location">
      <language name="java">android.location.Location</language>
    </typedef>
    <typedef name="highSpeedVideoConfiguration">
      <language name="java">android.hardware.camera2.params.HighSpeedVideoConfiguration</language>
    </typedef>
    <typedef name="reprocessFormatsMap">
      <language name="java">android.hardware.camera2.params.ReprocessFormatsMap</language>
    </typedef>
    <typedef name="oisSample">
      <language name="java">android.hardware.camera2.params.OisSample</language>
    </typedef>
  </types>

  <namespace name="android">
    <section name="colorCorrection">
      <controls>
        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
          <enum>
            <value>TRANSFORM_MATRIX
              <notes>Use the android.colorCorrection.transform matrix and
              android.colorCorrection.gains to do color conversion.

              All advanced white balance adjustments (not specified by our white balance pipeline) must be disabled.

              If AWB is enabled with `android.control.awbMode != OFF`, then TRANSFORM_MATRIX is ignored. The camera device will override this value to either FAST or HIGH_QUALITY.
              </notes>
            </value>
            <value>FAST
              <notes>Color correction processing must not slow down capture rate relative to sensor raw output.

              Advanced white balance adjustments above and beyond the specified white balance pipeline may be applied.

              If AWB is enabled with `android.control.awbMode != OFF`, then the camera device uses the last frame's AWB values (or defaults if AWB has never been run).
              </notes>
            </value>
            <value>HIGH_QUALITY
              <notes>Color correction processing operates at improved quality but the capture rate might be reduced (relative to sensor raw output rate).

              Advanced white balance adjustments above and beyond the specified white balance pipeline may be applied.

              If AWB is enabled with `android.control.awbMode != OFF`, then the camera device uses the last frame's AWB values (or defaults if AWB has never been run).
              </notes>
            </value>
          </enum>

          <description>
          The mode control selects how the image data is converted from the sensor's native color into linear sRGB color.
          </description>
          <details>
          When auto-white balance (AWB) is enabled with android.control.awbMode, this control is overridden by the AWB routine. When AWB is disabled, the application controls how the color mapping is performed.

          We define the expected processing pipeline below. For consistency across devices, this is always the case with TRANSFORM_MATRIX.

          When either FAST or HIGH_QUALITY is used, the camera device may do additional processing but android.colorCorrection.gains and android.colorCorrection.transform will still be provided by the camera device (in the results) and be roughly correct.

          Switching to TRANSFORM_MATRIX and using the data provided from FAST or HIGH_QUALITY will yield a picture with the same white point as what was produced by the camera device in the earlier frame.

          The expected processing pipeline is as follows:

          [white balance processing pipeline figure]

          The white balance is encoded by two values, a 4-channel white-balance gain vector (applied in the Bayer domain), and a 3x3 color transform matrix (applied after demosaic).

          The 4-channel white-balance gains are defined as:

              android.colorCorrection.gains = [ R G_even G_odd B ]

          where `G_even` is the gain for green pixels on even rows of the output, and `G_odd` is the gain for green pixels on the odd rows. These may be identical for a given camera device implementation; if the camera device does not support a separate gain for even/odd green channels, it will use the `G_even` value, and write `G_odd` equal to `G_even` in the output result metadata.

          The matrices for color transforms are defined as a 9-entry vector:

              android.colorCorrection.transform = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]

          which define a transform from input sensor colors, `P_in = [ r g b ]`, to output linear sRGB, `P_out = [ r' g' b' ]`,

          with colors as follows:

              r' = I0r + I1g + I2b
              g' = I3r + I4g + I5b
              b' = I6r + I7g + I8b

          Both the input and output value ranges must match.
          Overflow/underflow values are clipped to fit within the range.
          </details>
          <hal_details>
          HAL must support both FAST and HIGH_QUALITY if color correction control is available on the camera device, but the underlying implementation can be the same for both modes. That is, if the highest quality implementation on the camera device does not slow down capture rate, then FAST and HIGH_QUALITY should generate the same output.
          </hal_details>
        </entry>
        <entry name="transform" type="rational" visibility="public"
               type_notes="3x3 rational matrix in row-major order"
               container="array" typedef="colorSpaceTransform" hwlevel="full">
          <array>
            <size>3</size>
            <size>3</size>
          </array>
          <description>A color transform matrix to use to transform from sensor RGB color space to output linear sRGB color space.
          </description>
          <units>Unitless scale factors</units>
          <details>This matrix is either set by the camera device when the request android.colorCorrection.mode is not TRANSFORM_MATRIX, or directly by the application in the request when the android.colorCorrection.mode is TRANSFORM_MATRIX.

          In the latter case, the camera device may round the matrix to account for precision issues; the final rounded matrix should be reported back in this matrix result metadata. The transform should keep the magnitude of the output color values within `[0, 1.0]` (assuming input color values are within the normalized range `[0, 1.0]`), or clipping may occur.

          The valid range of each matrix element varies on different devices, but values within [-1.5, 3.0] are guaranteed not to be clipped.
          </details>
        </entry>
        <entry name="gains" type="float" visibility="public"
               type_notes="A 1D array of floats for 4 color channel gains"
               container="array" typedef="rggbChannelVector" hwlevel="full">
          <array>
            <size>4</size>
          </array>
          <description>Gains applying to Bayer raw color channels for white-balance.</description>
          <units>Unitless gain factors</units>
          <details>
          These per-channel gains are either set by the camera device when the request android.colorCorrection.mode is not TRANSFORM_MATRIX, or directly by the application in the request when the android.colorCorrection.mode is TRANSFORM_MATRIX.

          The gains in the result metadata are the gains actually applied by the camera device to the current frame.

          The valid range of gains varies on different devices, but gains between [1.0, 3.0] are guaranteed not to be clipped. Even if a given device allows gains below 1.0, this is usually not recommended because this can create color artifacts.
          </details>
          <hal_details>
          The 4-channel white-balance gains are defined in the order of `[R G_even G_odd B]`, where `G_even` is the gain for green pixels on even rows of the output, and `G_odd` is the gain for green pixels on the odd rows.

          If a HAL does not support a separate gain for even/odd green channels, it must use the `G_even` value, and write `G_odd` equal to `G_even` in the output result metadata.
          </hal_details>
        </entry>
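        <!-- A minimal, hypothetical Java sketch of the white-balance math documented for
             android.colorCorrection.mode, android.colorCorrection.gains and
             android.colorCorrection.transform: the 4-channel gains are applied in the Bayer
             domain, and the row-major 3x3 transform is applied to each demosaiced RGB pixel.
             The class/helper names and the explicit clipping shown here are illustrative only,
             not part of the API.

             class ColorCorrectionMath {
               // gains follow the documented order [R, G_even, G_odd, B]; values normalized to [0, 1].
               static float[] applyGains(float[] bayerQuad, float[] gains) {
                 float[] out = new float[4];
                 for (int i = 0; i < 4; i++) {
                   out[i] = Math.min(1.0f, bayerQuad[i] * gains[i]);  // overflow is clipped
                 }
                 return out;
               }

               // transform is the row-major 9-entry vector [I0 .. I8]:
               //   r' = I0*r + I1*g + I2*b,  g' = I3*r + I4*g + I5*b,  b' = I6*r + I7*g + I8*b
               static float[] applyTransform(float[] rgb, float[] transform) {
                 float[] out = new float[3];
                 for (int row = 0; row < 3; row++) {
                   float v = transform[3 * row] * rgb[0]
                           + transform[3 * row + 1] * rgb[1]
                           + transform[3 * row + 2] * rgb[2];
                   out[row] = Math.max(0.0f, Math.min(1.0f, v));  // underflow/overflow is clipped
                 }
                 return out;
               }
             }
        -->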
        <entry name="aberrationMode" type="byte" visibility="public" enum="true" hwlevel="legacy">
          <enum>
            <value>OFF
              <notes>
                No aberration correction is applied.
              </notes>
            </value>
            <value>FAST
              <notes>
                Aberration correction will not slow down capture rate relative to sensor raw output.
              </notes>
            </value>
            <value>HIGH_QUALITY
              <notes>
                Aberration correction operates at improved quality but the capture rate might be reduced (relative to sensor raw output rate).
              </notes>
            </value>
          </enum>
          <description>
            Mode of operation for the chromatic aberration correction algorithm.
          </description>
          <range>android.colorCorrection.availableAberrationModes</range>
          <details>
            Chromatic (color) aberration is caused by the fact that different wavelengths of light cannot focus on the same point after exiting the lens. This metadata defines the high-level control of the chromatic aberration correction algorithm, which aims to minimize the chromatic artifacts that may occur along the object boundaries in an image.

            FAST/HIGH_QUALITY both mean that camera device determined aberration correction will be applied. HIGH_QUALITY mode indicates that the camera device will use the highest-quality aberration correction algorithms, even if it slows down capture rate. FAST means the camera device will not slow down capture rate when applying aberration correction.

            LEGACY devices will always be in FAST mode.
          </details>
        </entry>
      </controls>
      <dynamic>
        <clone entry="android.colorCorrection.mode" kind="controls">
        </clone>
        <clone entry="android.colorCorrection.transform" kind="controls">
        </clone>
        <clone entry="android.colorCorrection.gains" kind="controls">
        </clone>
        <clone entry="android.colorCorrection.aberrationMode" kind="controls">
        </clone>
      </dynamic>
      <static>
        <entry name="availableAberrationModes" type="byte" visibility="public"
               type_notes="list of enums" container="array" typedef="enumList" hwlevel="legacy">
          <array>
            <size>n</size>
          </array>
          <description>
            List of aberration correction modes for android.colorCorrection.aberrationMode that are supported by this camera device.
          </description>
          <range>Any value listed in android.colorCorrection.aberrationMode</range>
          <details>
            This key lists the valid modes for android.colorCorrection.aberrationMode. If no aberration correction modes are available for a device, this list will solely include OFF mode. All camera devices will support either OFF or FAST mode.

            Camera devices that support the MANUAL_POST_PROCESSING capability will always list OFF mode. This includes all FULL level devices.

            LEGACY devices will always only support FAST mode.
          </details>
          <hal_details>
            HAL must support both FAST and HIGH_QUALITY if chromatic aberration control is available on the camera device, but the underlying implementation can be the same for both modes. That is, if the highest quality implementation on the camera device does not slow down capture rate, then FAST and HIGH_QUALITY will generate the same output.
          </hal_details>
          <tag id="V1" />
        </entry>
      </static>
    </section>
    <section name="control">
      <controls>
        <entry name="aeAntibandingMode" type="byte" visibility="public"
               enum="true" hwlevel="legacy">
          <enum>
            <value>OFF
              <notes>
                The camera device will not adjust exposure duration to avoid banding problems.
              </notes>
            </value>
            <value>50HZ
              <notes>
                The camera device will adjust exposure duration to avoid banding problems with 50Hz illumination sources.
              </notes>
            </value>
            <value>60HZ
              <notes>
                The camera device will adjust exposure duration to avoid banding problems with 60Hz illumination sources.
              </notes>
            </value>
            <value>AUTO
              <notes>
                The camera device will automatically adapt its antibanding routine to the current illumination condition. This is the default mode if AUTO is available on the given camera device.
              </notes>
            </value>
          </enum>
          <description>
            The desired setting for the camera device's auto-exposure algorithm's antibanding compensation.
          </description>
          <range>
            android.control.aeAvailableAntibandingModes
          </range>
          <details>
            Some kinds of lighting fixtures, such as some fluorescent lights, flicker at the rate of the power supply frequency (60Hz or 50Hz, depending on country). While this is typically not noticeable to a person, it can be visible to a camera device. If a camera sets its exposure time to the wrong value, the flicker may become visible in the viewfinder as flicker or in a final captured image, as a set of variable-brightness bands across the image.

            Therefore, the auto-exposure routines of camera devices include antibanding routines that ensure that the chosen exposure value will not cause such banding. The choice of exposure time depends on the rate of flicker, which the camera device can detect automatically, or the expected rate can be selected by the application using this control.

            A given camera device may not support all of the possible options for the antibanding mode. The android.control.aeAvailableAntibandingModes key contains the available modes for a given camera device.

            AUTO mode is the default if it is available on the given camera device. When AUTO mode is not available, the default will be either 50HZ or 60HZ, and both 50HZ and 60HZ will be available.

            If manual exposure control is enabled (by setting android.control.aeMode or android.control.mode to OFF), then this setting has no effect, and the application must ensure it selects exposure times that do not cause banding issues. The android.statistics.sceneFlicker key can assist the application in this.
          </details>
          <hal_details>
            For all capture request templates, this field must be set to AUTO if AUTO mode is available. If AUTO is not available, the default must be either 50HZ or 60HZ, and both 50HZ and 60HZ must be available.

            If manual exposure control is enabled (by setting android.control.aeMode or android.control.mode to OFF), then the exposure values provided by the application must not be adjusted for antibanding.
          </hal_details>
          <tag id="BC" />
        </entry>
        <entry name="aeExposureCompensation" type="int32" visibility="public" hwlevel="legacy">
          <description>Adjustment to auto-exposure (AE) target image brightness.</description>
          <units>Compensation steps</units>
          <range>android.control.aeCompensationRange</range>
          <details>
          The adjustment is measured as a count of steps, with the step size defined by android.control.aeCompensationStep and the allowed range by android.control.aeCompensationRange.

          For example, if the exposure value (EV) step is 0.333, '6' will mean an exposure compensation of +2 EV; -3 will mean an exposure compensation of -1 EV. One EV represents a doubling of image brightness.
          Note that this control will only be effective if android.control.aeMode `!=` OFF. This control will take effect even when android.control.aeLock `== true`.

          In the event of the exposure compensation value being changed, the camera device may take several frames to reach the newly requested exposure target. During that time, the android.control.aeState field will be in the SEARCHING state. Once the new exposure target is reached, android.control.aeState will change from SEARCHING to either CONVERGED, LOCKED (if AE lock is enabled), or FLASH_REQUIRED (if the scene is too dark for still capture).
          </details>
          <tag id="BC" />
        </entry>
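        <!-- A minimal Java sketch (illustrative only) of the step arithmetic documented above for
             android.control.aeExposureCompensation: a desired EV shift is divided by
             android.control.aeCompensationStep and clamped to android.control.aeCompensationRange.
             The helper name and the surrounding objects (a CameraCharacteristics and a
             CaptureRequest.Builder supplied by the application) are assumptions of this sketch.

             import android.hardware.camera2.CameraCharacteristics;
             import android.hardware.camera2.CaptureRequest;
             import android.util.Range;
             import android.util.Rational;

             class EvCompensation {
               static void applyEvShift(CameraCharacteristics chars, CaptureRequest.Builder builder,
                                        double desiredEv) {
                 Rational step = chars.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
                 Range<Integer> range = chars.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
                 // e.g. desiredEv = +2.0 with a 1/3 EV step gives 6 compensation steps
                 int steps = (int) Math.round(desiredEv * step.getDenominator() / step.getNumerator());
                 steps = Math.max(range.getLower(), Math.min(range.getUpper(), steps));
                 builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, steps);
               }
             }
        -->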
        <entry name="aeLock" type="byte" visibility="public" enum="true"
               typedef="boolean" hwlevel="legacy">
          <enum>
            <value>OFF
              <notes>Auto-exposure lock is disabled; the AE algorithm is free to update its parameters.</notes></value>
            <value>ON
              <notes>Auto-exposure lock is enabled; the AE algorithm must not update the exposure and sensitivity parameters while the lock is active.

              android.control.aeExposureCompensation setting changes will still take effect while auto-exposure is locked.

              Some rare LEGACY devices may not support this, in which case the value will always be overridden to OFF.
              </notes></value>
          </enum>
          <description>Whether auto-exposure (AE) is currently locked to its latest calculated values.</description>
          <details>
          When set to `true` (ON), the AE algorithm is locked to its latest parameters, and will not change exposure settings until the lock is set to `false` (OFF).

          Note that even when AE is locked, the flash may be fired if the android.control.aeMode is ON_AUTO_FLASH / ON_ALWAYS_FLASH / ON_AUTO_FLASH_REDEYE.

          When android.control.aeExposureCompensation is changed, even if the AE lock is ON, the camera device will still adjust its exposure value.

          If AE precapture is triggered (see android.control.aePrecaptureTrigger) when AE is already locked, the camera device will not change the exposure time (android.sensor.exposureTime) and sensitivity (android.sensor.sensitivity) parameters. The flash may be fired if the android.control.aeMode is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the android.control.aeMode is ON_ALWAYS_FLASH, the scene may become overexposed. Similarly, AE precapture trigger CANCEL has no effect when AE is already locked.

          When an AE precapture sequence is triggered, AE unlock will not be able to unlock the AE if AE is locked by the camera device internally during the precapture metering sequence. In other words, submitting requests with AE unlock has no effect for an ongoing precapture metering sequence. Otherwise, the precapture metering sequence will never succeed in a sequence of preview requests where AE lock is always set to `false`.

          Since the camera device has a pipeline of in-flight requests, the settings that get locked do not necessarily correspond to the settings that were present in the latest capture result received from the camera device, since additional captures and AE updates may have occurred even before the result was sent out. If an application is switching between automatic and manual control and wishes to eliminate any flicker during the switch, the following procedure is recommended:

            1. Starting in auto-AE mode:
            2. Lock AE
            3. Wait for the first result to be output that has the AE locked
            4. Copy exposure settings from that result into a request, set the request to manual AE
            5. Submit the capture request, proceed to run manual AE as desired.

          See android.control.aeState for AE lock related state transition details.
          </details>
          <tag id="BC" />
        </entry>
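        <!-- An illustrative Java sketch (not part of the API) of the lock-then-copy procedure
             described in android.control.aeLock for switching from auto to manual exposure
             without flicker. The class/helper names and the callback wiring are assumptions of
             this sketch; error handling is omitted.

             import android.hardware.camera2.CameraMetadata;
             import android.hardware.camera2.CaptureRequest;
             import android.hardware.camera2.CaptureResult;

             class AeLockSwitch {
               // Step 2: lock AE in the repeating (auto) request.
               static void lockAe(CaptureRequest.Builder autoBuilder) {
                 autoBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
               }

               // Steps 3 to 5: once a result arrives with AE locked, copy its exposure values
               // into a manual AE request.
               static void copyToManual(CaptureResult lockedResult, CaptureRequest.Builder manual) {
                 Integer aeState = lockedResult.get(CaptureResult.CONTROL_AE_STATE);
                 if (aeState == null || aeState != CameraMetadata.CONTROL_AE_STATE_LOCKED) {
                   return;  // step 3: keep waiting for the first locked result
                 }
                 manual.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
                 manual.set(CaptureRequest.SENSOR_EXPOSURE_TIME,
                            lockedResult.get(CaptureResult.SENSOR_EXPOSURE_TIME));
                 manual.set(CaptureRequest.SENSOR_SENSITIVITY,
                            lockedResult.get(CaptureResult.SENSOR_SENSITIVITY));
                 manual.set(CaptureRequest.SENSOR_FRAME_DURATION,
                            lockedResult.get(CaptureResult.SENSOR_FRAME_DURATION));
               }
             }
        -->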
        <entry name="aeMode" type="byte" visibility="public" enum="true" hwlevel="legacy">
          <enum>
            <value>OFF
              <notes>
                The camera device's autoexposure routine is disabled.

                The application-selected android.sensor.exposureTime, android.sensor.sensitivity and android.sensor.frameDuration are used by the camera device, along with android.flash.* fields, if there's a flash unit for this camera device.

                Note that auto-white balance (AWB) and auto-focus (AF) behavior is device dependent when AE is in OFF mode. To have consistent behavior across different devices, it is recommended to either set AWB and AF to OFF mode or lock AWB and AF before setting AE to OFF. See android.control.awbMode, android.control.afMode, android.control.awbLock, and android.control.afTrigger for more details.

                LEGACY devices do not support the OFF mode and will override attempts to use this value to ON.
              </notes>
            </value>
            <value>ON
              <notes>
                The camera device's autoexposure routine is active, with no flash control.

                The application's values for android.sensor.exposureTime, android.sensor.sensitivity, and android.sensor.frameDuration are ignored. The application has control over the various android.flash.* fields.
              </notes>
            </value>
            <value>ON_AUTO_FLASH
              <notes>
                Like ON, except that the camera device also controls the camera's flash unit, firing it in low-light conditions.

                The flash may be fired during a precapture sequence (triggered by android.control.aePrecaptureTrigger) and may be fired for captures for which the android.control.captureIntent field is set to STILL_CAPTURE.
              </notes>
            </value>
            <value>ON_ALWAYS_FLASH
              <notes>
                Like ON, except that the camera device also controls the camera's flash unit, always firing it for still captures.

                The flash may be fired during a precapture sequence (triggered by android.control.aePrecaptureTrigger) and will always be fired for captures for which the android.control.captureIntent field is set to STILL_CAPTURE.
              </notes>
            </value>
            <value>ON_AUTO_FLASH_REDEYE
              <notes>
                Like ON_AUTO_FLASH, but with automatic red eye reduction.

                If deemed necessary by the camera device, a red eye reduction flash will fire during the precapture sequence.
              </notes>
            </value>
            <value hal_version="3.3">ON_EXTERNAL_FLASH
              <notes>
                An external flash has been turned on.

                It informs the camera device that an external flash has been turned on, and that metering (and continuous focus if active) should be quickly recalculated to account for the external flash. Otherwise, this mode acts like ON.

                When the external flash is turned off, AE mode should be changed to one of the other available AE modes.

                If the camera device supports AE external flash mode, android.control.aeState must be FLASH_REQUIRED after the camera device finishes AE scan and it's too dark without flash.
              </notes>
            </value>
          </enum>
          <description>The desired mode for the camera device's auto-exposure routine.</description>
          <range>android.control.aeAvailableModes</range>
          <details>
            This control is only effective if android.control.mode is AUTO.

            When set to any of the ON modes, the camera device's auto-exposure routine is enabled, overriding the application's selected exposure time, sensor sensitivity, and frame duration (android.sensor.exposureTime, android.sensor.sensitivity, and android.sensor.frameDuration). If one of the FLASH modes is selected, the camera device's flash unit controls are also overridden.

            The FLASH modes are only available if the camera device has a flash unit (android.flash.info.available is `true`).

            If flash TORCH mode is desired, this field must be set to ON or OFF, and android.flash.mode set to TORCH.

            When set to any of the ON modes, the values chosen by the camera device auto-exposure routine for the overridden fields for a given capture will be available in its CaptureResult.
          </details>
          <tag id="BC" />
        </entry>
        <entry name="aeRegions" type="int32" visibility="public"
               optional="true" container="array" typedef="meteringRectangle">
          <array>
            <size>5</size>
            <size>area_count</size>
          </array>
          <description>List of metering areas to use for auto-exposure adjustment.</description>
          <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
          <range>Coordinates must be between `[(0,0), (width, height))` of android.sensor.info.activeArraySize</range>
          <details>
            Not available if android.control.maxRegionsAe is 0. Otherwise will always be present.

            The maximum number of regions supported by the device is determined by the value of android.control.maxRegionsAe.

            The coordinate system is based on the active pixel array, with (0,0) being the top-left pixel in the active pixel array, and (android.sensor.info.activeArraySize.width - 1, android.sensor.info.activeArraySize.height - 1) being the bottom-right pixel in the active pixel array.

            The weight must be within `[0, 1000]`, and represents a weight for every pixel in the area. This means that a large metering area with the same weight as a smaller area will have more effect in the metering result. Metering areas can partially overlap and the camera device will add the weights in the overlap region.

            The weights are relative to weights of other exposure metering regions, so if only one region is used, all non-zero weights will have the same effect. A region with 0 weight is ignored.

            If all regions have 0 weight, then no specific metering area needs to be used by the camera device.

            If the metering region is outside the used android.scaler.cropRegion returned in capture result metadata, the camera device will ignore the sections outside the crop region and output only the intersection rectangle as the metering region in the result metadata. If the region is entirely outside the crop region, it will be ignored and not reported in the result metadata.
          </details>
          <ndk_details>
            The data representation is `int[5 * area_count]`. Every five elements represent a metering region of `(xmin, ymin, xmax, ymax, weight)`. The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and ymax.
          </ndk_details>
          <hal_details>
            The HAL level representation of MeteringRectangle[] is an int[5 * area_count]. Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight). The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and ymax.
          </hal_details>
          <tag id="BC" />
        </entry>
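        <!-- An illustrative Java sketch (the helper name and the choice of metering on the center
             quarter of the active array are assumptions) of building one android.control.aeRegions
             entry in active-array coordinates with the maximum weight, as described above.

             import android.graphics.Rect;
             import android.hardware.camera2.CameraCharacteristics;
             import android.hardware.camera2.CaptureRequest;
             import android.hardware.camera2.params.MeteringRectangle;

             class AeRegionExample {
               static void meterOnCenter(CameraCharacteristics chars, CaptureRequest.Builder builder) {
                 Rect active = chars.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
                 int regionW = active.width() / 2;
                 int regionH = active.height() / 2;
                 // (0,0) is the top-left pixel of the active array; weight is within [0, 1000].
                 MeteringRectangle center = new MeteringRectangle(
                     (active.width() - regionW) / 2, (active.height() - regionH) / 2,
                     regionW, regionH, MeteringRectangle.METERING_WEIGHT_MAX);
                 builder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[] { center });
               }
             }
        -->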
        <entry name="aeTargetFpsRange" type="int32" visibility="public"
               container="array" typedef="rangeInt" hwlevel="legacy">
          <array>
            <size>2</size>
          </array>
          <description>Range over which the auto-exposure routine can adjust the capture frame rate to maintain good exposure.</description>
          <units>Frames per second (FPS)</units>
          <range>Any of the entries in android.control.aeAvailableTargetFpsRanges</range>
          <details>Only constrains auto-exposure (AE) algorithm, not manual control of android.sensor.exposureTime and android.sensor.frameDuration.</details>
          <tag id="BC" />
        </entry>
        <entry name="aePrecaptureTrigger" type="byte" visibility="public"
               enum="true" hwlevel="limited">
          <enum>
            <value>IDLE
              <notes>The trigger is idle.</notes>
            </value>
            <value>START
              <notes>The precapture metering sequence will be started by the camera device.

              The exact effect of the precapture trigger depends on the current AE mode and state.</notes>
            </value>
            <value>CANCEL
              <notes>The camera device will cancel any currently active or completed precapture metering sequence; the auto-exposure routine will return to its initial state.</notes>
            </value>
          </enum>
          <description>Whether the camera device will trigger a precapture metering sequence when it processes this request.</description>
          <details>This entry is normally set to IDLE, or is not included at all in the request settings. When included and set to START, the camera device will trigger the auto-exposure (AE) precapture metering sequence.

          When set to CANCEL, the camera device will cancel any active precapture metering trigger, and return to its initial AE state. If a precapture metering sequence is already completed, and the camera device has implicitly locked the AE for subsequent still capture, the CANCEL trigger will unlock the AE and return to its initial AE state.

          The precapture sequence should be triggered before starting a high-quality still capture for final metering decisions to be made, and for firing pre-capture flash pulses to estimate scene brightness and required final capture flash power, when the flash is enabled.

          Normally, this entry should be set to START for only a single request, and the application should wait until the sequence completes before starting a new one.

          When a precapture metering sequence is finished, the camera device may lock the auto-exposure routine internally to be able to accurately expose the subsequent still capture image (`android.control.captureIntent == STILL_CAPTURE`). For this case, the AE may not resume normal scan if no subsequent still capture is submitted. To ensure that the AE routine restarts normal scan, the application should submit a request with `android.control.aeLock == true`, followed by a request with `android.control.aeLock == false`, if the application decides not to submit a still capture request after the precapture sequence completes. Alternatively, for API level 23 or newer devices, CANCEL can be used to unlock the camera device's internally locked AE if the application doesn't submit a still capture request after the AE precapture trigger. Note that CANCEL was added in API level 23, and must not be used in devices that have earlier API levels.

          The exact effect of the auto-exposure (AE) precapture trigger depends on the current AE mode and state; see android.control.aeState for AE precapture state transition details.

          On LEGACY-level devices, the precapture trigger is not supported; capturing a high-resolution JPEG image will automatically trigger a precapture sequence before the high-resolution capture, including potentially firing a pre-capture flash.

          Using the precapture trigger and the auto-focus trigger android.control.afTrigger simultaneously is allowed. However, since these triggers often require cooperation between the auto-focus and auto-exposure routines (for example, they may need to be enabled for a focus sweep), the camera device may delay acting on a later trigger until the previous trigger has been fully handled. This may lead to longer intervals between the trigger and changes to android.control.aeState indicating the start of the precapture sequence, for example.

          If both the precapture and the auto-focus trigger are activated on the same request, then the camera device will complete them in the optimal order for that device.
          </details>
          <hal_details>
          The HAL must support triggering the AE precapture trigger while an AF trigger is active (and vice versa), or at the same time as the AF trigger. It is acceptable for the HAL to treat these as two consecutive triggers, for example handling the AF trigger and then the AE trigger. Or the HAL may choose to optimize the case with both triggers fired at once, to minimize the latency for converging both focus and exposure/flash usage.
          </hal_details>
          <tag id="BC" />
        </entry>
        <entry name="afMode" type="byte" visibility="public" enum="true"
               hwlevel="legacy">
          <enum>
            <value>OFF
              <notes>The auto-focus routine does not control the lens; android.lens.focusDistance is controlled by the application.</notes></value>
            <value>AUTO
              <notes>Basic automatic focus mode.

              In this mode, the lens does not move unless the autofocus trigger action is called. When that trigger is activated, AF will transition to ACTIVE_SCAN, then to the outcome of the scan (FOCUSED or NOT_FOCUSED).

              Always supported if lens is not fixed focus.

              Use android.lens.info.minimumFocusDistance to determine if lens is fixed-focus.

              Triggering AF_CANCEL resets the lens position to default, and sets the AF state to INACTIVE.</notes></value>
            <value>MACRO
              <notes>Close-up focusing mode.

              In this mode, the lens does not move unless the autofocus trigger action is called. When that trigger is activated, AF will transition to ACTIVE_SCAN, then to the outcome of the scan (FOCUSED or NOT_FOCUSED). This mode is optimized for focusing on objects very close to the camera.

              Triggering cancel AF resets the lens position to default, and sets the AF state to INACTIVE.</notes></value>
            <value>CONTINUOUS_VIDEO
              <notes>In this mode, the AF algorithm modifies the lens position continually to attempt to provide a constantly-in-focus image stream.

              The focusing behavior should be suitable for good quality video recording; typically this means slower focus movement and no overshoots. When the AF trigger is not involved, the AF algorithm should start in INACTIVE state, and then transition into PASSIVE_SCAN and PASSIVE_FOCUSED states as appropriate. When the AF trigger is activated, the algorithm should immediately transition into AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the lens position until a cancel AF trigger is received.

              Once cancel is received, the algorithm should transition back to INACTIVE and resume passive scan. Note that this behavior is not identical to CONTINUOUS_PICTURE, since an ongoing PASSIVE_SCAN must immediately be canceled.</notes></value>
            <value>CONTINUOUS_PICTURE
              <notes>In this mode, the AF algorithm modifies the lens position continually to attempt to provide a constantly-in-focus image stream.

              The focusing behavior should be suitable for still image capture; typically this means focusing as fast as possible. When the AF trigger is not involved, the AF algorithm should start in INACTIVE state, and then transition into PASSIVE_SCAN and PASSIVE_FOCUSED states as appropriate as it attempts to maintain focus. When the AF trigger is activated, the algorithm should finish its PASSIVE_SCAN if active, and then transition into AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the lens position until a cancel AF trigger is received.

              When the AF cancel trigger is activated, the algorithm should transition back to INACTIVE and then act as if it has just been started.</notes></value>
            <value>EDOF
              <notes>Extended depth of field (digital focus) mode.

              The camera device will produce images with an extended depth of field automatically; no special focusing operations need to be done before taking a picture.

              AF triggers are ignored, and the AF state will always be INACTIVE.</notes></value>
          </enum>
          <description>Whether auto-focus (AF) is currently enabled, and what mode it is set to.</description>
          <range>android.control.afAvailableModes</range>
          <details>Only effective if android.control.mode = AUTO and the lens is not fixed focus (i.e. `android.lens.info.minimumFocusDistance > 0`). Also note that when android.control.aeMode is OFF, the behavior of AF is device dependent. It is recommended to lock AF by using android.control.afTrigger before setting android.control.aeMode to OFF, or set AF mode to OFF when AE is OFF.

          If the lens is controlled by the camera device auto-focus algorithm, the camera device will report the current AF status in android.control.afState in result metadata.</details>
          <hal_details>
          When afMode is AUTO or MACRO, the lens must not move until an AF trigger is sent in a request (android.control.afTrigger `==` START). After an AF trigger, the afState will end up with either FOCUSED_LOCKED or NOT_FOCUSED_LOCKED state (see android.control.afState for detailed state transitions), which indicates that the lens is locked and will not move. If camera movement (e.g.
          tilting camera) causes the lens to move after the lens is locked, the HAL must compensate this movement appropriately such that the same focal plane remains in focus.

          When afMode is one of the continuous auto focus modes, the HAL is free to start an AF scan whenever it's not locked. When the lens is locked after an AF trigger (see android.control.afState for detailed state transitions), the HAL should maintain the same lock behavior as above.

          When afMode is OFF, the application controls focus manually. The accuracy of the focus distance control depends on the android.lens.info.focusDistanceCalibration. However, the lens must not move regardless of the camera movement for any focus distance manual control.

          To put this in concrete terms, if the camera has lens elements which may move based on camera orientation or motion (e.g. due to gravity), then the HAL must drive the lens to remain in a fixed position invariant to the camera's orientation or motion, for example, by using accelerometer measurements in the lens control logic. This is a typical issue that will arise on camera modules with open-loop VCMs.
          </hal_details>
          <tag id="BC" />
        </entry>
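        <!-- An illustrative Java sketch (the helper name and null callbacks are assumptions) of
             the single-request trigger pattern described for android.control.afMode and
             android.control.afTrigger: START is submitted in exactly one capture, then the
             repeating request continues with the trigger back at IDLE.

             import android.hardware.camera2.CameraAccessException;
             import android.hardware.camera2.CameraCaptureSession;
             import android.hardware.camera2.CameraMetadata;
             import android.hardware.camera2.CaptureRequest;
             import android.os.Handler;

             class AfTriggerExample {
               static void triggerAutofocus(CameraCaptureSession session,
                                            CaptureRequest.Builder preview,
                                            Handler handler) throws CameraAccessException {
                 preview.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO);
                 preview.set(CaptureRequest.CONTROL_AF_TRIGGER,
                             CameraMetadata.CONTROL_AF_TRIGGER_START);
                 session.capture(preview.build(), null, handler);  // the one request carrying START
                 preview.set(CaptureRequest.CONTROL_AF_TRIGGER,
                             CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
                 session.setRepeatingRequest(preview.build(), null, handler);
               }
             }
        -->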
        <entry name="afRegions" type="int32" visibility="public"
               optional="true" container="array" typedef="meteringRectangle">
          <array>
            <size>5</size>
            <size>area_count</size>
          </array>
          <description>List of metering areas to use for auto-focus.</description>
          <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
          <range>Coordinates must be between `[(0,0), (width, height))` of android.sensor.info.activeArraySize</range>
          <details>
            Not available if android.control.maxRegionsAf is 0. Otherwise will always be present.

            The maximum number of focus areas supported by the device is determined by the value of android.control.maxRegionsAf.

            The coordinate system is based on the active pixel array, with (0,0) being the top-left pixel in the active pixel array, and (android.sensor.info.activeArraySize.width - 1, android.sensor.info.activeArraySize.height - 1) being the bottom-right pixel in the active pixel array.

            The weight must be within `[0, 1000]`, and represents a weight for every pixel in the area. This means that a large metering area with the same weight as a smaller area will have more effect in the metering result. Metering areas can partially overlap and the camera device will add the weights in the overlap region.

            The weights are relative to weights of other metering regions, so if only one region is used, all non-zero weights will have the same effect. A region with 0 weight is ignored.

            If all regions have 0 weight, then no specific metering area needs to be used by the camera device. The capture result will either be a zero weight region as well, or the region selected by the camera device as the focus area of interest.

            If the metering region is outside the used android.scaler.cropRegion returned in capture result metadata, the camera device will ignore the sections outside the crop region and output only the intersection rectangle as the metering region in the result metadata. If the region is entirely outside the crop region, it will be ignored and not reported in the result metadata.
          </details>
          <ndk_details>
            The data representation is `int[5 * area_count]`. Every five elements represent a metering region of `(xmin, ymin, xmax, ymax, weight)`. The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and ymax.
          </ndk_details>
          <hal_details>
            The HAL level representation of MeteringRectangle[] is an int[5 * area_count]. Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight). The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and ymax.
          </hal_details>
          <tag id="BC" />
        </entry>
        <entry name="afTrigger" type="byte" visibility="public" enum="true"
               hwlevel="legacy">
          <enum>
            <value>IDLE
              <notes>The trigger is idle.</notes>
            </value>
            <value>START
              <notes>Autofocus will trigger now.</notes>
            </value>
            <value>CANCEL
              <notes>Autofocus will return to its initial state, and cancel any currently active trigger.</notes>
            </value>
          </enum>
          <description>
            Whether the camera device will trigger autofocus for this request.
          </description>
          <details>This entry is normally set to IDLE, or is not included at all in the request settings.

          When included and set to START, the camera device will trigger the autofocus algorithm. If autofocus is disabled, this trigger has no effect.

          When set to CANCEL, the camera device will cancel any active trigger, and return to its initial AF state.

          Generally, applications should set this entry to START or CANCEL for only a single capture, and then return it to IDLE (or not set at all). Specifying START for multiple captures in a row means restarting the AF operation over and over again.

          See android.control.afState for what the trigger means for each AF mode.

          Using the autofocus trigger and the precapture trigger android.control.aePrecaptureTrigger simultaneously is allowed. However, since these triggers often require cooperation between the auto-focus and auto-exposure routines (for example, they may need to be enabled for a focus sweep), the camera device may delay acting on a later trigger until the previous trigger has been fully handled. This may lead to longer intervals between the trigger and changes to android.control.afState, for example.
          </details>
          <hal_details>
          The HAL must support triggering the AF trigger while an AE precapture trigger is active (and vice versa), or at the same time as the AE trigger. It is acceptable for the HAL to treat these as two consecutive triggers, for example handling the AF trigger and then the AE trigger. Or the HAL may choose to optimize the case with both triggers fired at once, to minimize the latency for converging both focus and exposure/flash usage.
          </hal_details>
          <tag id="BC" />
        </entry>
        <entry name="awbLock" type="byte" visibility="public" enum="true"
               typedef="boolean" hwlevel="legacy">
          <enum>
            <value>OFF
              <notes>Auto-white balance lock is disabled; the AWB algorithm is free to update its parameters if in AUTO mode.</notes></value>
            <value>ON
              <notes>Auto-white balance lock is enabled; the AWB algorithm will not update its parameters while the lock is active.</notes></value>
          </enum>
          <description>Whether auto-white balance (AWB) is currently locked to its latest calculated values.</description>
          <details>
          When set to `true` (ON), the AWB algorithm is locked to its latest parameters, and will not change color balance settings until the lock is set to `false` (OFF).

          Since the camera device has a pipeline of in-flight requests, the settings that get locked do not necessarily correspond to the settings that were present in the latest capture result received from the camera device, since additional captures and AWB updates may have occurred even before the result was sent out. If an application is switching between automatic and manual control and wishes to eliminate any flicker during the switch, the following procedure is recommended:

            1. Starting in auto-AWB mode:
            2. Lock AWB
            3. Wait for the first result to be output that has the AWB locked
            4. Copy AWB settings from that result into a request, set the request to manual AWB
            5. Submit the capture request, proceed to run manual AWB as desired.

          Note that AWB lock is only meaningful when android.control.awbMode is in the AUTO mode; in other modes, AWB is already fixed to a specific setting.

          Some LEGACY devices may not support ON; the value is then overridden to OFF.
          </details>
          <tag id="BC" />
        </entry>
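        <!-- An illustrative Java sketch (the class/helper names are assumptions) of the
             lock-then-copy procedure described in android.control.awbLock: once a result with
             AWB locked is received, its color correction gains and transform are copied into a
             manual white-balance request, matching the TRANSFORM_MATRIX discussion in
             android.colorCorrection.mode.

             import android.hardware.camera2.CameraMetadata;
             import android.hardware.camera2.CaptureRequest;
             import android.hardware.camera2.CaptureResult;

             class AwbLockSwitch {
               static void copyToManualWb(CaptureResult lockedResult, CaptureRequest.Builder manual) {
                 manual.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_OFF);
                 manual.set(CaptureRequest.COLOR_CORRECTION_MODE,
                            CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
                 manual.set(CaptureRequest.COLOR_CORRECTION_GAINS,
                            lockedResult.get(CaptureResult.COLOR_CORRECTION_GAINS));
                 manual.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
                            lockedResult.get(CaptureResult.COLOR_CORRECTION_TRANSFORM));
               }
             }
        -->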
        <entry name="awbMode" type="byte" visibility="public" enum="true"
               hwlevel="legacy">
          <enum>
            <value>OFF
              <notes>
                The camera device's auto-white balance routine is disabled.

                The application-selected color transform matrix (android.colorCorrection.transform) and gains (android.colorCorrection.gains) are used by the camera device for manual white balance control.
              </notes>
            </value>
            <value>AUTO
              <notes>
                The camera device's auto-white balance routine is active.

                The application's values for android.colorCorrection.transform and android.colorCorrection.gains are ignored. For devices that support the MANUAL_POST_PROCESSING capability, the values used by the camera device for the transform and gains will be available in the capture result for this request.
              </notes>
            </value>
            <value>INCANDESCENT
              <notes>
                The camera device's auto-white balance routine is disabled; the camera device uses incandescent light as the assumed scene illumination for white balance.

                While the exact white balance transforms are up to the camera device, they will approximately match the CIE standard illuminant A.

                The application's values for android.colorCorrection.transform and android.colorCorrection.gains are ignored. For devices that support the MANUAL_POST_PROCESSING capability, the values used by the camera device for the transform and gains will be available in the capture result for this request.
              </notes>
            </value>
            <value>FLUORESCENT
              <notes>
                The camera device's auto-white balance routine is disabled; the camera device uses fluorescent light as the assumed scene illumination for white balance.

                While the exact white balance transforms are up to the camera device, they will approximately match the CIE standard illuminant F2.

                The application's values for android.colorCorrection.transform and android.colorCorrection.gains are ignored. For devices that support the MANUAL_POST_PROCESSING capability, the values used by the camera device for the transform and gains will be available in the capture result for this request.
              </notes>
            </value>
            <value>WARM_FLUORESCENT
              <notes>
                The camera device's auto-white balance routine is disabled; the camera device uses warm fluorescent light as the assumed scene illumination for white balance.

                While the exact white balance transforms are up to the camera device, they will approximately match the CIE standard illuminant F4.

                The application's values for android.colorCorrection.transform and android.colorCorrection.gains are ignored. For devices that support the MANUAL_POST_PROCESSING capability, the values used by the camera device for the transform and gains will be available in the capture result for this request.
              </notes>
            </value>
            <value>DAYLIGHT
              <notes>
                The camera device's auto-white balance routine is disabled; the camera device uses daylight light as the assumed scene illumination for white balance.

                While the exact white balance transforms are up to the camera device, they will approximately match the CIE standard illuminant D65.

                The application's values for android.colorCorrection.transform and android.colorCorrection.gains are ignored. For devices that support the MANUAL_POST_PROCESSING capability, the values used by the camera device for the transform and gains will be available in the capture result for this request.
              </notes>
            </value>
            <value>CLOUDY_DAYLIGHT
              <notes>
                The camera device's auto-white balance routine is disabled; the camera device uses cloudy daylight light as the assumed scene illumination for white balance.

                The application's values for android.colorCorrection.transform and android.colorCorrection.gains are ignored. For devices that support the MANUAL_POST_PROCESSING capability, the values used by the camera device for the transform and gains will be available in the capture result for this request.
              </notes>
            </value>
            <value>TWILIGHT
              <notes>
                The camera device's auto-white balance routine is disabled; the camera device uses twilight light as the assumed scene illumination for white balance.

                The application's values for android.colorCorrection.transform and android.colorCorrection.gains are ignored. For devices that support the MANUAL_POST_PROCESSING capability, the values used by the camera device for the transform and gains will be available in the capture result for this request.
              </notes>
            </value>
            <value>SHADE
              <notes>
                The camera device's auto-white balance routine is disabled; the camera device uses shade light as the assumed scene illumination for white balance.

                The application's values for android.colorCorrection.transform and android.colorCorrection.gains are ignored. For devices that support the MANUAL_POST_PROCESSING capability, the values used by the camera device for the transform and gains will be available in the capture result for this request.
              </notes>
            </value>
          </enum>
          <description>Whether auto-white balance (AWB) is currently setting the color transform fields, and what its illumination target is.</description>
          <range>android.control.awbAvailableModes</range>
          <details>
          This control is only effective if android.control.mode is AUTO.

          When set to the AUTO mode, the camera device's auto-white balance routine is enabled, overriding the application's selected android.colorCorrection.transform, android.colorCorrection.gains and android.colorCorrection.mode. Note that when android.control.aeMode is OFF, the behavior of AWB is device dependent. It is recommended to also set AWB mode to OFF or lock AWB by using android.control.awbLock before setting AE mode to OFF.

          When set to the OFF mode, the camera device's auto-white balance routine is disabled. The application manually controls the white balance by android.colorCorrection.transform, android.colorCorrection.gains and android.colorCorrection.mode.

          When set to any other mode, the camera device's auto-white balance routine is disabled. The camera device uses each particular illumination target for white balance adjustment. The application's values for android.colorCorrection.transform, android.colorCorrection.gains and android.colorCorrection.mode are ignored.
          </details>
          <tag id="BC" />
        </entry>
        <entry name="awbRegions" type="int32" visibility="public"
               optional="true" container="array" typedef="meteringRectangle">
          <array>
            <size>5</size>
            <size>area_count</size>
          </array>
          <description>List of metering areas to use for auto-white-balance illuminant estimation.</description>
          <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
          <range>Coordinates must be between `[(0,0), (width, height))` of android.sensor.info.activeArraySize</range>
          <details>
            Not available if android.control.maxRegionsAwb is 0. Otherwise will always be present.

            The maximum number of regions supported by the device is determined by the value of android.control.maxRegionsAwb.

            The coordinate system is based on the active pixel array, with (0,0) being the top-left pixel in the active pixel array, and (android.sensor.info.activeArraySize.width - 1, android.sensor.info.activeArraySize.height - 1) being the bottom-right pixel in the active pixel array.

            The weight must range from 0 to 1000, and represents a weight for every pixel in the area. This means that a large metering area with the same weight as a smaller area will have more effect in the metering result. Metering areas can partially overlap and the camera device will add the weights in the overlap region.

            The weights are relative to weights of other white balance metering regions, so if only one region is used, all non-zero weights will have the same effect. A region with 0 weight is ignored.

            If all regions have 0 weight, then no specific metering area needs to be used by the camera device.
1310 1311 If the metering region is outside the used android.scaler.cropRegion returned in 1312 capture result metadata, the camera device will ignore the sections outside the crop 1313 region and output only the intersection rectangle as the metering region in the result 1314 metadata. If the region is entirely outside the crop region, it will be ignored and 1315 not reported in the result metadata. 1316 </details> 1317 <ndk_details> 1318 The data representation is `int[5 * area_count]`. 1319 Every five elements represent a metering region of `(xmin, ymin, xmax, ymax, weight)`. 1320 The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and 1321 ymax. 1322 </ndk_details> 1323 <hal_details> 1324 The HAL level representation of MeteringRectangle[] is a 1325 int[5 * area_count]. 1326 Every five elements represent a metering region of 1327 (xmin, ymin, xmax, ymax, weight). 1328 The rectangle is defined to be inclusive on xmin and ymin, but 1329 exclusive on xmax and ymax. 1330 </hal_details> 1331 <tag id="BC" /> 1332 </entry> 1333 <entry name="captureIntent" type="byte" visibility="public" enum="true" 1334 hwlevel="legacy"> 1335 <enum> 1336 <value>CUSTOM 1337 <notes>The goal of this request doesn't fall into the other 1338 categories. The camera device will default to preview-like 1339 behavior.</notes></value> 1340 <value>PREVIEW 1341 <notes>This request is for a preview-like use case. 1342 1343 The precapture trigger may be used to start off a metering 1344 w/flash sequence. 1345 </notes></value> 1346 <value>STILL_CAPTURE 1347 <notes>This request is for a still capture-type 1348 use case. 1349 1350 If the flash unit is under automatic control, it may fire as needed. 1351 </notes></value> 1352 <value>VIDEO_RECORD 1353 <notes>This request is for a video recording 1354 use case.</notes></value> 1355 <value>VIDEO_SNAPSHOT 1356 <notes>This request is for a video snapshot (still 1357 image while recording video) use case. 1358 1359 The camera device should take the highest-quality image 1360 possible (given the other settings) without disrupting the 1361 frame rate of video recording. </notes></value> 1362 <value>ZERO_SHUTTER_LAG 1363 <notes>This request is for a ZSL usecase; the 1364 application will stream full-resolution images and 1365 reprocess one or several later for a final 1366 capture. 1367 </notes></value> 1368 <value>MANUAL 1369 <notes>This request is for manual capture use case where 1370 the applications want to directly control the capture parameters. 1371 1372 For example, the application may wish to manually control 1373 android.sensor.exposureTime, android.sensor.sensitivity, etc. 1374 </notes></value> 1375 <value hal_version="3.3">MOTION_TRACKING 1376 <notes>This request is for a motion tracking use case, where 1377 the application will use camera and inertial sensor data to 1378 locate and track objects in the world. 1379 1380 The camera device auto-exposure routine will limit the exposure time 1381 of the camera to no more than 20 milliseconds, to minimize motion blur. 1382 </notes></value> 1383 </enum> 1384 <description>Information to the camera device 3A (auto-exposure, 1385 auto-focus, auto-white balance) routines about the purpose 1386 of this capture, to help the camera device to decide optimal 3A 1387 strategy.</description> 1388 <details>This control (except for MANUAL) is only effective if 1389 `android.control.mode != OFF` and any 3A routine is active. 
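
        As a hedged, non-normative illustration (the surfaces are assumed to exist and error
        handling is omitted), an application typically gets the appropriate intent from the
        request template, or sets it explicitly:

```java
// Sketch: build a video-snapshot request; TEMPLATE_VIDEO_SNAPSHOT already sets
// android.control.captureIntent, but it can also be set explicitly.
static CaptureRequest buildVideoSnapshot(CameraDevice camera,
        Surface jpegSurface, Surface recordSurface) throws CameraAccessException {
    CaptureRequest.Builder builder =
            camera.createCaptureRequest(CameraDevice.TEMPLATE_VIDEO_SNAPSHOT);
    builder.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
            CameraMetadata.CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT);
    builder.addTarget(jpegSurface);
    builder.addTarget(recordSurface);
    return builder.build();
}
```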
1390 1391 All intents are supported by all devices, except that: 1392 * ZERO_SHUTTER_LAG will be supported if android.request.availableCapabilities contains 1393 PRIVATE_REPROCESSING or YUV_REPROCESSING. 1394 * MANUAL will be supported if android.request.availableCapabilities contains 1395 MANUAL_SENSOR. 1396 * MOTION_TRACKING will be supported if android.request.availableCapabilities contains 1397 MOTION_TRACKING. 1398 </details> 1399 <tag id="BC" /> 1400 </entry> 1401 <entry name="effectMode" type="byte" visibility="public" enum="true" 1402 hwlevel="legacy"> 1403 <enum> 1404 <value>OFF 1405 <notes> 1406 No color effect will be applied. 1407 </notes> 1408 </value> 1409 <value optional="true">MONO 1410 <notes> 1411 A "monocolor" effect where the image is mapped into 1412 a single color. 1413 1414 This will typically be grayscale. 1415 </notes> 1416 </value> 1417 <value optional="true">NEGATIVE 1418 <notes> 1419 A "photo-negative" effect where the image's colors 1420 are inverted. 1421 </notes> 1422 </value> 1423 <value optional="true">SOLARIZE 1424 <notes> 1425 A "solarisation" effect (Sabattier effect) where the 1426 image is wholly or partially reversed in 1427 tone. 1428 </notes> 1429 </value> 1430 <value optional="true">SEPIA 1431 <notes> 1432 A "sepia" effect where the image is mapped into warm 1433 gray, red, and brown tones. 1434 </notes> 1435 </value> 1436 <value optional="true">POSTERIZE 1437 <notes> 1438 A "posterization" effect where the image uses 1439 discrete regions of tone rather than a continuous 1440 gradient of tones. 1441 </notes> 1442 </value> 1443 <value optional="true">WHITEBOARD 1444 <notes> 1445 A "whiteboard" effect where the image is typically displayed 1446 as regions of white, with black or grey details. 1447 </notes> 1448 </value> 1449 <value optional="true">BLACKBOARD 1450 <notes> 1451 A "blackboard" effect where the image is typically displayed 1452 as regions of black, with white or grey details. 1453 </notes> 1454 </value> 1455 <value optional="true">AQUA 1456 <notes> 1457 An "aqua" effect where a blue hue is added to the image. 1458 </notes> 1459 </value> 1460 </enum> 1461 <description>A special color effect to apply.</description> 1462 <range>android.control.availableEffects</range> 1463 <details> 1464 When this mode is set, a color effect will be applied 1465 to images produced by the camera device. The interpretation 1466 and implementation of these color effects is left to the 1467 implementor of the camera device, and should not be 1468 depended on to be consistent (or present) across all 1469 devices. 1470 </details> 1471 <tag id="BC" /> 1472 </entry> 1473 <entry name="mode" type="byte" visibility="public" enum="true" 1474 hwlevel="legacy"> 1475 <enum> 1476 <value>OFF 1477 <notes>Full application control of pipeline. 1478 1479 All control by the device's metering and focusing (3A) 1480 routines is disabled, and no other settings in 1481 android.control.* have any effect, except that 1482 android.control.captureIntent may be used by the camera 1483 device to select post-processing values for processing 1484 blocks that do not allow for manual control, or are not 1485 exposed by the camera API. 1486 1487 However, the camera device's 3A routines may continue to 1488 collect statistics and update their internal state so that 1489 when control is switched to AUTO mode, good control values 1490 can be immediately applied. 1491 </notes></value> 1492 <value>AUTO 1493 <notes>Use settings for each individual 3A routine. 
1494 1495 Manual control of capture parameters is disabled. All 1496 controls in android.control.* besides sceneMode take 1497 effect.</notes></value> 1498 <value optional="true">USE_SCENE_MODE 1499 <notes>Use a specific scene mode. 1500 1501 Enabling this disables control.aeMode, control.awbMode and 1502 control.afMode controls; the camera device will ignore 1503 those settings while USE_SCENE_MODE is active (except for 1504 FACE_PRIORITY scene mode). Other control entries are still active. 1505 This setting can only be used if scene mode is supported (i.e. 1506 android.control.availableSceneModes 1507 contain some modes other than DISABLED).</notes></value> 1508 <value optional="true">OFF_KEEP_STATE 1509 <notes>Same as OFF mode, except that this capture will not be 1510 used by camera device background auto-exposure, auto-white balance and 1511 auto-focus algorithms (3A) to update their statistics. 1512 1513 Specifically, the 3A routines are locked to the last 1514 values set from a request with AUTO, OFF, or 1515 USE_SCENE_MODE, and any statistics or state updates 1516 collected from manual captures with OFF_KEEP_STATE will be 1517 discarded by the camera device. 1518 </notes></value> 1519 </enum> 1520 <description>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control 1521 routines.</description> 1522 <range>android.control.availableModes</range> 1523 <details> 1524 This is a top-level 3A control switch. When set to OFF, all 3A control 1525 by the camera device is disabled. The application must set the fields for 1526 capture parameters itself. 1527 1528 When set to AUTO, the individual algorithm controls in 1529 android.control.* are in effect, such as android.control.afMode. 1530 1531 When set to USE_SCENE_MODE, the individual controls in 1532 android.control.* are mostly disabled, and the camera device 1533 implements one of the scene mode settings (such as ACTION, 1534 SUNSET, or PARTY) as it wishes. The camera device scene mode 1535 3A settings are provided by {@link 1536 android.hardware.camera2.CaptureResult|ACameraCaptureSession_captureCallback_result 1537 capture results}. 1538 1539 When set to OFF_KEEP_STATE, it is similar to OFF mode, the only difference 1540 is that this frame will not be used by camera device background 3A statistics 1541 update, as if this frame is never captured. This mode can be used in the scenario 1542 where the application doesn't want a 3A manual control capture to affect 1543 the subsequent auto 3A capture results. 1544 </details> 1545 <tag id="BC" /> 1546 </entry> 1547 <entry name="sceneMode" type="byte" visibility="public" enum="true" 1548 hwlevel="legacy"> 1549 <enum> 1550 <value id="0">DISABLED 1551 <notes> 1552 Indicates that no scene modes are set for a given capture request. 1553 </notes> 1554 </value> 1555 <value>FACE_PRIORITY 1556 <notes>If face detection support exists, use face 1557 detection data for auto-focus, auto-white balance, and 1558 auto-exposure routines. 1559 1560 If face detection statistics are disabled 1561 (i.e. android.statistics.faceDetectMode is set to OFF), 1562 this should still operate correctly (but will not return 1563 face detection statistics to the framework). 1564 1565 Unlike the other scene modes, android.control.aeMode, 1566 android.control.awbMode, and android.control.afMode 1567 remain active when FACE_PRIORITY is set. 1568 </notes> 1569 </value> 1570 <value optional="true">ACTION 1571 <notes> 1572 Optimized for photos of quickly moving objects. 1573 1574 Similar to SPORTS. 
1575 </notes> 1576 </value> 1577 <value optional="true">PORTRAIT 1578 <notes> 1579 Optimized for still photos of people. 1580 </notes> 1581 </value> 1582 <value optional="true">LANDSCAPE 1583 <notes> 1584 Optimized for photos of distant macroscopic objects. 1585 </notes> 1586 </value> 1587 <value optional="true">NIGHT 1588 <notes> 1589 Optimized for low-light settings. 1590 </notes> 1591 </value> 1592 <value optional="true">NIGHT_PORTRAIT 1593 <notes> 1594 Optimized for still photos of people in low-light 1595 settings. 1596 </notes> 1597 </value> 1598 <value optional="true">THEATRE 1599 <notes> 1600 Optimized for dim, indoor settings where flash must 1601 remain off. 1602 </notes> 1603 </value> 1604 <value optional="true">BEACH 1605 <notes> 1606 Optimized for bright, outdoor beach settings. 1607 </notes> 1608 </value> 1609 <value optional="true">SNOW 1610 <notes> 1611 Optimized for bright, outdoor settings containing snow. 1612 </notes> 1613 </value> 1614 <value optional="true">SUNSET 1615 <notes> 1616 Optimized for scenes of the setting sun. 1617 </notes> 1618 </value> 1619 <value optional="true">STEADYPHOTO 1620 <notes> 1621 Optimized to avoid blurry photos due to small amounts of 1622 device motion (for example: due to hand shake). 1623 </notes> 1624 </value> 1625 <value optional="true">FIREWORKS 1626 <notes> 1627 Optimized for nighttime photos of fireworks. 1628 </notes> 1629 </value> 1630 <value optional="true">SPORTS 1631 <notes> 1632 Optimized for photos of quickly moving people. 1633 1634 Similar to ACTION. 1635 </notes> 1636 </value> 1637 <value optional="true">PARTY 1638 <notes> 1639 Optimized for dim, indoor settings with multiple moving 1640 people. 1641 </notes> 1642 </value> 1643 <value optional="true">CANDLELIGHT 1644 <notes> 1645 Optimized for dim settings where the main light source 1646 is a flame. 1647 </notes> 1648 </value> 1649 <value optional="true">BARCODE 1650 <notes> 1651 Optimized for accurately capturing a photo of barcode 1652 for use by camera applications that wish to read the 1653 barcode value. 1654 </notes> 1655 </value> 1656 <value deprecated="true" optional="true" ndk_hidden="true">HIGH_SPEED_VIDEO 1657 <notes> 1658 This is deprecated, please use {@link 1659 android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession} 1660 and {@link 1661 android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList} 1662 for high speed video recording. 1663 1664 Optimized for high speed video recording (frame rate >=60fps) use case. 1665 1666 The supported high speed video sizes and fps ranges are specified in 1667 android.control.availableHighSpeedVideoConfigurations. To get desired 1668 output frame rates, the application is only allowed to select video size 1669 and fps range combinations listed in this static metadata. The fps range 1670 can be control via android.control.aeTargetFpsRange. 1671 1672 In this mode, the camera device will override aeMode, awbMode, and afMode to 1673 ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing block mode 1674 controls will be overridden to be FAST. Therefore, no manual control of capture 1675 and post-processing parameters is possible. All other controls operate the 1676 same as when android.control.mode == AUTO. 
This means that all other
        android.control.* fields continue to work, such as

        * android.control.aeTargetFpsRange
        * android.control.aeExposureCompensation
        * android.control.aeLock
        * android.control.awbLock
        * android.control.effectMode
        * android.control.aeRegions
        * android.control.afRegions
        * android.control.awbRegions
        * android.control.afTrigger
        * android.control.aePrecaptureTrigger

        Outside of android.control.*, the following controls will work:

        * android.flash.mode (automatic flash for still capture will not work since aeMode is ON)
        * android.lens.opticalStabilizationMode (if it is supported)
        * android.scaler.cropRegion
        * android.statistics.faceDetectMode

        For the high speed recording use case, the actual maximum supported frame rate may
        be lower than what the camera can output, depending on the destination Surfaces for
        the image data. For example, if the destination surface is from a video encoder,
        the application needs to check if the video encoder is capable of supporting the
        high frame rate for a given video size, or it will end up with a lower recording
        frame rate. If the destination surface is from a preview window, the preview frame
        rate will be bounded by the screen refresh rate.

        The camera device will only support up to 2 output high speed streams
        (processed non-stalling format defined in android.request.maxNumOutputStreams)
        in this mode. This control will be effective only if all of the below conditions are true:

        * The application created no more than maxNumHighSpeedStreams processed non-stalling
        format output streams, where maxNumHighSpeedStreams is calculated as
        min(2, android.request.maxNumOutputStreams[Processed (but not-stalling)]).
        * The stream sizes are selected from the sizes reported by
        android.control.availableHighSpeedVideoConfigurations.
        * No processed non-stalling or raw streams are configured.

        When the above conditions are NOT satisfied, the controls of this mode and
        android.control.aeTargetFpsRange will be ignored by the camera device,
        the camera device will fall back to android.control.mode `==` AUTO,
        and the returned capture result metadata will give the fps range chosen
        by the camera device.

        Switching into or out of this mode may trigger some camera ISP/sensor
        reconfigurations, which may introduce extra latency. It is recommended that
        the application avoid unnecessary scene mode switches as much as possible.
        </notes>
          </value>
          <value optional="true">HDR
            <notes>
            Turn on a device-specific high dynamic range (HDR) mode.

            In this scene mode, the camera device captures images
            that keep a larger range of scene illumination levels
            visible in the final image. For example, when taking a
            picture of an object in front of a bright window, both
            the object and the scene through the window may be
            visible when using HDR mode, while in normal AUTO mode,
            one or the other may be poorly exposed. As a tradeoff,
            HDR mode generally takes much longer to capture a single
            image, has no user control, and may have other artifacts
            depending on the HDR method used.

            Therefore, HDR captures operate at a much slower rate
            than regular captures.
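
            As a rough, non-normative sketch (availability checking is abbreviated and the
            builder is assumed to target a supported output type), an application might enable
            this scene mode for a still capture like so:

```java
// Sketch: enable the HDR scene mode for a still capture, if the device lists it.
static void requestHdrStill(CameraCharacteristics characteristics,
        CaptureRequest.Builder stillBuilder) {
    int[] sceneModes = characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES);
    if (sceneModes == null) {
        return;
    }
    for (int mode : sceneModes) {
        if (mode == CameraMetadata.CONTROL_SCENE_MODE_HDR) {
            stillBuilder.set(CaptureRequest.CONTROL_MODE,
                    CameraMetadata.CONTROL_MODE_USE_SCENE_MODE);
            stillBuilder.set(CaptureRequest.CONTROL_SCENE_MODE,
                    CameraMetadata.CONTROL_SCENE_MODE_HDR);
            stillBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
                    CameraMetadata.CONTROL_CAPTURE_INTENT_STILL_CAPTURE);
            return;
        }
    }
}
```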
1744 1745 In this mode, on LIMITED or FULL devices, when a request 1746 is made with a android.control.captureIntent of 1747 STILL_CAPTURE, the camera device will capture an image 1748 using a high dynamic range capture technique. On LEGACY 1749 devices, captures that target a JPEG-format output will 1750 be captured with HDR, and the capture intent is not 1751 relevant. 1752 1753 The HDR capture may involve the device capturing a burst 1754 of images internally and combining them into one, or it 1755 may involve the device using specialized high dynamic 1756 range capture hardware. In all cases, a single image is 1757 produced in response to a capture request submitted 1758 while in HDR mode. 1759 1760 Since substantial post-processing is generally needed to 1761 produce an HDR image, only YUV, PRIVATE, and JPEG 1762 outputs are supported for LIMITED/FULL device HDR 1763 captures, and only JPEG outputs are supported for LEGACY 1764 HDR captures. Using a RAW output for HDR capture is not 1765 supported. 1766 1767 Some devices may also support always-on HDR, which 1768 applies HDR processing at full frame rate. For these 1769 devices, intents other than STILL_CAPTURE will also 1770 produce an HDR output with no frame rate impact compared 1771 to normal operation, though the quality may be lower 1772 than for STILL_CAPTURE intents. 1773 1774 If SCENE_MODE_HDR is used with unsupported output types 1775 or capture intents, the images captured will be as if 1776 the SCENE_MODE was not enabled at all. 1777 </notes> 1778 </value> 1779 <value optional="true" hidden="true">FACE_PRIORITY_LOW_LIGHT 1780 <notes>Same as FACE_PRIORITY scene mode, except that the camera 1781 device will choose higher sensitivity values (android.sensor.sensitivity) 1782 under low light conditions. 1783 1784 The camera device may be tuned to expose the images in a reduced 1785 sensitivity range to produce the best quality images. For example, 1786 if the android.sensor.info.sensitivityRange gives range of [100, 1600], 1787 the camera device auto-exposure routine tuning process may limit the actual 1788 exposure sensitivity range to [100, 1200] to ensure that the noise level isn't 1789 exessive in order to preserve the image quality. Under this situation, the image under 1790 low light may be under-exposed when the sensor max exposure time (bounded by the 1791 android.control.aeTargetFpsRange when android.control.aeMode is one of the 1792 ON_* modes) and effective max sensitivity are reached. This scene mode allows the 1793 camera device auto-exposure routine to increase the sensitivity up to the max 1794 sensitivity specified by android.sensor.info.sensitivityRange when the scene is too 1795 dark and the max exposure time is reached. The captured images may be noisier 1796 compared with the images captured in normal FACE_PRIORITY mode; therefore, it is 1797 recommended that the application only use this scene mode when it is capable of 1798 reducing the noise level of the captured images. 1799 1800 Unlike the other scene modes, android.control.aeMode, 1801 android.control.awbMode, and android.control.afMode 1802 remain active when FACE_PRIORITY_LOW_LIGHT is set. 1803 </notes> 1804 </value> 1805 <value optional="true" hidden="true" id="100">DEVICE_CUSTOM_START 1806 <notes> 1807 Scene mode values within the range of 1808 `[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END]` are reserved for device specific 1809 customized scene modes. 
            </notes>
          </value>
          <value optional="true" hidden="true" id="127">DEVICE_CUSTOM_END
            <notes>
            Scene mode values within the range of
            `[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END]` are reserved for device specific
            customized scene modes.
            </notes>
          </value>
        </enum>
        <description>
        Control for which scene mode is currently active.
        </description>
        <range>android.control.availableSceneModes</range>
        <details>
        Scene modes are custom camera modes optimized for a certain set of conditions and
        capture settings.

        This is the mode that is active when
        `android.control.mode == USE_SCENE_MODE`. Aside from FACE_PRIORITY, these modes will
        disable android.control.aeMode, android.control.awbMode, and android.control.afMode
        while in use.

        The interpretation and implementation of these scene modes is left
        to the implementor of the camera device. Their behavior will not be
        consistent across all devices, and any given device may only implement
        a subset of these modes.
        </details>
        <hal_details>
        HAL implementations that include scene modes are expected to provide
        the per-scene settings to use for android.control.aeMode,
        android.control.awbMode, and android.control.afMode in
        android.control.sceneModeOverrides.

        For HIGH_SPEED_VIDEO mode, if it is included in android.control.availableSceneModes, the
        HAL must list the supported video sizes and fps ranges in
        android.control.availableHighSpeedVideoConfigurations. For a given size, e.g. 1280x720,
        if the HAL has two different sensor configurations for normal streaming mode and high
        speed streaming, when this scene mode is set/reset in a sequence of capture requests, the
        HAL may have to switch between different sensor modes. This mode is deprecated in legacy
        HAL3.3; to support high speed video recording, please implement
        android.control.availableHighSpeedVideoConfigurations and the CONSTRAINED_HIGH_SPEED_VIDEO
        capability defined in android.request.availableCapabilities.
        </hal_details>
        <tag id="BC" />
      </entry>
      <entry name="videoStabilizationMode" type="byte" visibility="public"
             enum="true" hwlevel="legacy">
        <enum>
          <value>OFF
            <notes>
            Video stabilization is disabled.
            </notes></value>
          <value>ON
            <notes>
            Video stabilization is enabled.
            </notes></value>
        </enum>
        <description>Whether video stabilization is
        active.</description>
        <details>
        Video stabilization automatically warps images from
        the camera in order to stabilize motion between consecutive frames.

        If enabled, video stabilization can modify the
        android.scaler.cropRegion to keep the video stream stabilized.

        Switching between different video stabilization modes may take several
        frames to initialize; the camera device will report the current mode
        in capture result metadata. For example, when "ON" mode is requested,
        the video stabilization modes in the first several capture results may
        still be "OFF", and it will become "ON" when the initialization is
        done.

        In addition, not all recording sizes or frame rates may be supported for
        stabilization by a device that reports stabilization support.
It is guaranteed 1886 that an output targeting a MediaRecorder or MediaCodec will be stabilized if 1887 the recording resolution is less than or equal to 1920 x 1080 (width less than 1888 or equal to 1920, height less than or equal to 1080), and the recording 1889 frame rate is less than or equal to 30fps. At other sizes, the CaptureResult 1890 android.control.videoStabilizationMode field will return 1891 OFF if the recording output is not stabilized, or if there are no output 1892 Surface types that can be stabilized. 1893 1894 If a camera device supports both this mode and OIS 1895 (android.lens.opticalStabilizationMode), turning both modes on may 1896 produce undesirable interaction, so it is recommended not to enable 1897 both at the same time. 1898 </details> 1899 <tag id="BC" /> 1900 </entry> 1901 </controls> 1902 <static> 1903 <entry name="aeAvailableAntibandingModes" type="byte" visibility="public" 1904 type_notes="list of enums" container="array" typedef="enumList" 1905 hwlevel="legacy"> 1906 <array> 1907 <size>n</size> 1908 </array> 1909 <description> 1910 List of auto-exposure antibanding modes for android.control.aeAntibandingMode that are 1911 supported by this camera device. 1912 </description> 1913 <range>Any value listed in android.control.aeAntibandingMode</range> 1914 <details> 1915 Not all of the auto-exposure anti-banding modes may be 1916 supported by a given camera device. This field lists the 1917 valid anti-banding modes that the application may request 1918 for this camera device with the 1919 android.control.aeAntibandingMode control. 1920 </details> 1921 <tag id="BC" /> 1922 </entry> 1923 <entry name="aeAvailableModes" type="byte" visibility="public" 1924 type_notes="list of enums" container="array" typedef="enumList" 1925 hwlevel="legacy"> 1926 <array> 1927 <size>n</size> 1928 </array> 1929 <description> 1930 List of auto-exposure modes for android.control.aeMode that are supported by this camera 1931 device. 1932 </description> 1933 <range>Any value listed in android.control.aeMode</range> 1934 <details> 1935 Not all the auto-exposure modes may be supported by a 1936 given camera device, especially if no flash unit is 1937 available. This entry lists the valid modes for 1938 android.control.aeMode for this camera device. 1939 1940 All camera devices support ON, and all camera devices with flash 1941 units support ON_AUTO_FLASH and ON_ALWAYS_FLASH. 1942 1943 FULL mode camera devices always support OFF mode, 1944 which enables application control of camera exposure time, 1945 sensitivity, and frame duration. 1946 1947 LEGACY mode camera devices never support OFF mode. 1948 LIMITED mode devices support OFF if they support the MANUAL_SENSOR 1949 capability. 
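
        A minimal sketch of checking for OFF support, assuming a `characteristics` object for
        the camera in question (the helper name and structure are illustrative only):

```java
// Sketch: does this camera allow fully manual exposure (AE mode OFF)?
static boolean supportsManualExposure(CameraCharacteristics characteristics) {
    int[] aeModes = characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES);
    if (aeModes == null) {
        return false;
    }
    for (int mode : aeModes) {
        if (mode == CameraMetadata.CONTROL_AE_MODE_OFF) {
            return true;  // Guaranteed on FULL devices; LIMITED only with MANUAL_SENSOR.
        }
    }
    return false;
}
```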
1950 </details> 1951 <tag id="BC" /> 1952 </entry> 1953 <entry name="aeAvailableTargetFpsRanges" type="int32" visibility="public" 1954 type_notes="list of pairs of frame rates" 1955 container="array" typedef="rangeInt" 1956 hwlevel="legacy"> 1957 <array> 1958 <size>2</size> 1959 <size>n</size> 1960 </array> 1961 <description>List of frame rate ranges for android.control.aeTargetFpsRange supported by 1962 this camera device.</description> 1963 <units>Frames per second (FPS)</units> 1964 <details> 1965 For devices at the LEGACY level or above: 1966 1967 * For constant-framerate recording, for each normal 1968 {@link android.media.CamcorderProfile CamcorderProfile}, that is, a 1969 {@link android.media.CamcorderProfile CamcorderProfile} that has 1970 {@link android.media.CamcorderProfile#quality quality} in 1971 the range [{@link android.media.CamcorderProfile#QUALITY_LOW QUALITY_LOW}, 1972 {@link android.media.CamcorderProfile#QUALITY_2160P QUALITY_2160P}], if the profile is 1973 supported by the device and has 1974 {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} `x`, this list will 1975 always include (`x`,`x`). 1976 1977 * Also, a camera device must either not support any 1978 {@link android.media.CamcorderProfile CamcorderProfile}, 1979 or support at least one 1980 normal {@link android.media.CamcorderProfile CamcorderProfile} that has 1981 {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} `x` >= 24. 1982 1983 For devices at the LIMITED level or above: 1984 1985 * For YUV_420_888 burst capture use case, this list will always include (`min`, `max`) 1986 and (`max`, `max`) where `min` <= 15 and `max` = the maximum output frame rate of the 1987 maximum YUV_420_888 output size. 1988 </details> 1989 <tag id="BC" /> 1990 </entry> 1991 <entry name="aeCompensationRange" type="int32" visibility="public" 1992 container="array" typedef="rangeInt" 1993 hwlevel="legacy"> 1994 <array> 1995 <size>2</size> 1996 </array> 1997 <description>Maximum and minimum exposure compensation values for 1998 android.control.aeExposureCompensation, in counts of android.control.aeCompensationStep, 1999 that are supported by this camera device.</description> 2000 <range> 2001 Range [0,0] indicates that exposure compensation is not supported. 2002 2003 For LIMITED and FULL devices, range must follow below requirements if exposure 2004 compensation is supported (`range != [0, 0]`): 2005 2006 `Min.exposure compensation * android.control.aeCompensationStep <= -2 EV` 2007 2008 `Max.exposure compensation * android.control.aeCompensationStep >= 2 EV` 2009 2010 LEGACY devices may support a smaller range than this. 2011 </range> 2012 <tag id="BC" /> 2013 </entry> 2014 <entry name="aeCompensationStep" type="rational" visibility="public" 2015 hwlevel="legacy"> 2016 <description>Smallest step by which the exposure compensation 2017 can be changed.</description> 2018 <units>Exposure Value (EV)</units> 2019 <details> 2020 This is the unit for android.control.aeExposureCompensation. For example, if this key has 2021 a value of `1/2`, then a setting of `-2` for android.control.aeExposureCompensation means 2022 that the target EV offset for the auto-exposure routine is -1 EV. 2023 2024 One unit of EV compensation changes the brightness of the captured image by a factor 2025 of two. +1 EV doubles the image brightness, while -1 EV halves the image brightness. 2026 </details> 2027 <hal_details> 2028 This must be less than or equal to 1/2. 
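
        For reference, the application-side arithmetic this step feeds into might look like the
        following hypothetical helper; it is not part of the HAL contract, and the helper name
        and structure are assumptions for illustration only.

```java
// Hypothetical helper: convert a desired EV offset into an
// android.control.aeExposureCompensation index, clamped to the advertised range.
static int evToCompensationIndex(CameraCharacteristics characteristics, double desiredEv) {
    Rational step = characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
    Range<Integer> range =
            characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
    // Example: step = 1/2 and desiredEv = -1.0 gives index -2.
    int index = (int) Math.round(desiredEv * step.getDenominator() / step.getNumerator());
    return Math.max(range.getLower(), Math.min(range.getUpper(), index));
}
```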
2029 </hal_details> 2030 <tag id="BC" /> 2031 </entry> 2032 <entry name="afAvailableModes" type="byte" visibility="public" 2033 type_notes="List of enums" container="array" typedef="enumList" 2034 hwlevel="legacy"> 2035 <array> 2036 <size>n</size> 2037 </array> 2038 <description> 2039 List of auto-focus (AF) modes for android.control.afMode that are 2040 supported by this camera device. 2041 </description> 2042 <range>Any value listed in android.control.afMode</range> 2043 <details> 2044 Not all the auto-focus modes may be supported by a 2045 given camera device. This entry lists the valid modes for 2046 android.control.afMode for this camera device. 2047 2048 All LIMITED and FULL mode camera devices will support OFF mode, and all 2049 camera devices with adjustable focuser units 2050 (`android.lens.info.minimumFocusDistance > 0`) will support AUTO mode. 2051 2052 LEGACY devices will support OFF mode only if they support 2053 focusing to infinity (by also setting android.lens.focusDistance to 2054 `0.0f`). 2055 </details> 2056 <tag id="BC" /> 2057 </entry> 2058 <entry name="availableEffects" type="byte" visibility="public" 2059 type_notes="List of enums (android.control.effectMode)." container="array" 2060 typedef="enumList" hwlevel="legacy"> 2061 <array> 2062 <size>n</size> 2063 </array> 2064 <description> 2065 List of color effects for android.control.effectMode that are supported by this camera 2066 device. 2067 </description> 2068 <range>Any value listed in android.control.effectMode</range> 2069 <details> 2070 This list contains the color effect modes that can be applied to 2071 images produced by the camera device. 2072 Implementations are not expected to be consistent across all devices. 2073 If no color effect modes are available for a device, this will only list 2074 OFF. 2075 2076 A color effect will only be applied if 2077 android.control.mode != OFF. OFF is always included in this list. 2078 2079 This control has no effect on the operation of other control routines such 2080 as auto-exposure, white balance, or focus. 2081 </details> 2082 <tag id="BC" /> 2083 </entry> 2084 <entry name="availableSceneModes" type="byte" visibility="public" 2085 type_notes="List of enums (android.control.sceneMode)." 2086 container="array" typedef="enumList" hwlevel="legacy"> 2087 <array> 2088 <size>n</size> 2089 </array> 2090 <description> 2091 List of scene modes for android.control.sceneMode that are supported by this camera 2092 device. 2093 </description> 2094 <range>Any value listed in android.control.sceneMode</range> 2095 <details> 2096 This list contains scene modes that can be set for the camera device. 2097 Only scene modes that have been fully implemented for the 2098 camera device may be included here. Implementations are not expected 2099 to be consistent across all devices. 2100 2101 If no scene modes are supported by the camera device, this 2102 will be set to DISABLED. Otherwise DISABLED will not be listed. 2103 2104 FACE_PRIORITY is always listed if face detection is 2105 supported (i.e.`android.statistics.info.maxFaceCount > 2106 0`). 2107 </details> 2108 <tag id="BC" /> 2109 </entry> 2110 <entry name="availableVideoStabilizationModes" type="byte" 2111 visibility="public" type_notes="List of enums." container="array" 2112 typedef="enumList" hwlevel="legacy"> 2113 <array> 2114 <size>n</size> 2115 </array> 2116 <description> 2117 List of video stabilization modes for android.control.videoStabilizationMode 2118 that are supported by this camera device. 
2119 </description> 2120 <range>Any value listed in android.control.videoStabilizationMode</range> 2121 <details> 2122 OFF will always be listed. 2123 </details> 2124 <tag id="BC" /> 2125 </entry> 2126 <entry name="awbAvailableModes" type="byte" visibility="public" 2127 type_notes="List of enums" 2128 container="array" typedef="enumList" hwlevel="legacy"> 2129 <array> 2130 <size>n</size> 2131 </array> 2132 <description> 2133 List of auto-white-balance modes for android.control.awbMode that are supported by this 2134 camera device. 2135 </description> 2136 <range>Any value listed in android.control.awbMode</range> 2137 <details> 2138 Not all the auto-white-balance modes may be supported by a 2139 given camera device. This entry lists the valid modes for 2140 android.control.awbMode for this camera device. 2141 2142 All camera devices will support ON mode. 2143 2144 Camera devices that support the MANUAL_POST_PROCESSING capability will always support OFF 2145 mode, which enables application control of white balance, by using 2146 android.colorCorrection.transform and android.colorCorrection.gains 2147 (android.colorCorrection.mode must be set to TRANSFORM_MATRIX). This includes all FULL 2148 mode camera devices. 2149 </details> 2150 <tag id="BC" /> 2151 </entry> 2152 <entry name="maxRegions" type="int32" visibility="ndk_public" 2153 container="array" hwlevel="legacy"> 2154 <array> 2155 <size>3</size> 2156 </array> 2157 <description> 2158 List of the maximum number of regions that can be used for metering in 2159 auto-exposure (AE), auto-white balance (AWB), and auto-focus (AF); 2160 this corresponds to the the maximum number of elements in 2161 android.control.aeRegions, android.control.awbRegions, 2162 and android.control.afRegions. 2163 </description> 2164 <range> 2165 Value must be &gt;= 0 for each element. For full-capability devices 2166 this value must be &gt;= 1 for AE and AF. The order of the elements is: 2167 `(AE, AWB, AF)`.</range> 2168 <tag id="BC" /> 2169 </entry> 2170 <entry name="maxRegionsAe" type="int32" visibility="java_public" 2171 synthetic="true" hwlevel="legacy"> 2172 <description> 2173 The maximum number of metering regions that can be used by the auto-exposure (AE) 2174 routine. 2175 </description> 2176 <range>Value will be &gt;= 0. For FULL-capability devices, this 2177 value will be &gt;= 1. 2178 </range> 2179 <details> 2180 This corresponds to the the maximum allowed number of elements in 2181 android.control.aeRegions. 2182 </details> 2183 <hal_details>This entry is private to the framework. Fill in 2184 maxRegions to have this entry be automatically populated. 2185 </hal_details> 2186 </entry> 2187 <entry name="maxRegionsAwb" type="int32" visibility="java_public" 2188 synthetic="true" hwlevel="legacy"> 2189 <description> 2190 The maximum number of metering regions that can be used by the auto-white balance (AWB) 2191 routine. 2192 </description> 2193 <range>Value will be &gt;= 0. 2194 </range> 2195 <details> 2196 This corresponds to the the maximum allowed number of elements in 2197 android.control.awbRegions. 2198 </details> 2199 <hal_details>This entry is private to the framework. Fill in 2200 maxRegions to have this entry be automatically populated. 2201 </hal_details> 2202 </entry> 2203 <entry name="maxRegionsAf" type="int32" visibility="java_public" 2204 synthetic="true" hwlevel="legacy"> 2205 <description> 2206 The maximum number of metering regions that can be used by the auto-focus (AF) routine. 2207 </description> 2208 <range>Value will be &gt;= 0. 
For FULL-capability devices, this 2209 value will be &gt;= 1. 2210 </range> 2211 <details> 2212 This corresponds to the the maximum allowed number of elements in 2213 android.control.afRegions. 2214 </details> 2215 <hal_details>This entry is private to the framework. Fill in 2216 maxRegions to have this entry be automatically populated. 2217 </hal_details> 2218 </entry> 2219 <entry name="sceneModeOverrides" type="byte" visibility="system" 2220 container="array" hwlevel="limited"> 2221 <array> 2222 <size>3</size> 2223 <size>length(availableSceneModes)</size> 2224 </array> 2225 <description> 2226 Ordered list of auto-exposure, auto-white balance, and auto-focus 2227 settings to use with each available scene mode. 2228 </description> 2229 <range> 2230 For each available scene mode, the list must contain three 2231 entries containing the android.control.aeMode, 2232 android.control.awbMode, and android.control.afMode values used 2233 by the camera device. The entry order is `(aeMode, awbMode, afMode)` 2234 where aeMode has the lowest index position. 2235 </range> 2236 <details> 2237 When a scene mode is enabled, the camera device is expected 2238 to override android.control.aeMode, android.control.awbMode, 2239 and android.control.afMode with its preferred settings for 2240 that scene mode. 2241 2242 The order of this list matches that of availableSceneModes, 2243 with 3 entries for each mode. The overrides listed 2244 for FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported) are ignored, 2245 since for that mode the application-set android.control.aeMode, 2246 android.control.awbMode, and android.control.afMode values are 2247 used instead, matching the behavior when android.control.mode 2248 is set to AUTO. It is recommended that the FACE_PRIORITY and 2249 FACE_PRIORITY_LOW_LIGHT (if supported) overrides should be set to 0. 2250 2251 For example, if availableSceneModes contains 2252 `(FACE_PRIORITY, ACTION, NIGHT)`, then the camera framework 2253 expects sceneModeOverrides to have 9 entries formatted like: 2254 `(0, 0, 0, ON_AUTO_FLASH, AUTO, CONTINUOUS_PICTURE, 2255 ON_AUTO_FLASH, INCANDESCENT, AUTO)`. 2256 </details> 2257 <hal_details> 2258 To maintain backward compatibility, this list will be made available 2259 in the static metadata of the camera service. The camera service will 2260 use these values to set android.control.aeMode, 2261 android.control.awbMode, and android.control.afMode when using a scene 2262 mode other than FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported). 2263 </hal_details> 2264 <tag id="BC" /> 2265 </entry> 2266 </static> 2267 <dynamic> 2268 <entry name="aePrecaptureId" type="int32" visibility="system" deprecated="true"> 2269 <description>The ID sent with the latest 2270 CAMERA2_TRIGGER_PRECAPTURE_METERING call</description> 2271 <deprecation_description> 2272 Removed in camera HAL v3 2273 </deprecation_description> 2274 <details>Must be 0 if no 2275 CAMERA2_TRIGGER_PRECAPTURE_METERING trigger received yet 2276 by HAL. 
Always updated even if AE algorithm ignores the 2277 trigger</details> 2278 </entry> 2279 <clone entry="android.control.aeAntibandingMode" kind="controls"> 2280 </clone> 2281 <clone entry="android.control.aeExposureCompensation" kind="controls"> 2282 </clone> 2283 <clone entry="android.control.aeLock" kind="controls"> 2284 </clone> 2285 <clone entry="android.control.aeMode" kind="controls"> 2286 </clone> 2287 <clone entry="android.control.aeRegions" kind="controls"> 2288 </clone> 2289 <clone entry="android.control.aeTargetFpsRange" kind="controls"> 2290 </clone> 2291 <clone entry="android.control.aePrecaptureTrigger" kind="controls"> 2292 </clone> 2293 <entry name="aeState" type="byte" visibility="public" enum="true" 2294 hwlevel="limited"> 2295 <enum> 2296 <value>INACTIVE 2297 <notes>AE is off or recently reset. 2298 2299 When a camera device is opened, it starts in 2300 this state. This is a transient state, the camera device may skip reporting 2301 this state in capture result.</notes></value> 2302 <value>SEARCHING 2303 <notes>AE doesn't yet have a good set of control values 2304 for the current scene. 2305 2306 This is a transient state, the camera device may skip 2307 reporting this state in capture result.</notes></value> 2308 <value>CONVERGED 2309 <notes>AE has a good set of control values for the 2310 current scene.</notes></value> 2311 <value>LOCKED 2312 <notes>AE has been locked.</notes></value> 2313 <value>FLASH_REQUIRED 2314 <notes>AE has a good set of control values, but flash 2315 needs to be fired for good quality still 2316 capture.</notes></value> 2317 <value>PRECAPTURE 2318 <notes>AE has been asked to do a precapture sequence 2319 and is currently executing it. 2320 2321 Precapture can be triggered through setting 2322 android.control.aePrecaptureTrigger to START. Currently 2323 active and completed (if it causes camera device internal AE lock) precapture 2324 metering sequence can be canceled through setting 2325 android.control.aePrecaptureTrigger to CANCEL. 2326 2327 Once PRECAPTURE completes, AE will transition to CONVERGED 2328 or FLASH_REQUIRED as appropriate. This is a transient 2329 state, the camera device may skip reporting this state in 2330 capture result.</notes></value> 2331 </enum> 2332 <description>Current state of the auto-exposure (AE) algorithm.</description> 2333 <details>Switching between or enabling AE modes (android.control.aeMode) always 2334 resets the AE state to INACTIVE. Similarly, switching between android.control.mode, 2335 or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all 2336 the algorithm states to INACTIVE. 2337 2338 The camera device can do several state transitions between two results, if it is 2339 allowed by the state transition table. For example: INACTIVE may never actually be 2340 seen in a result. 2341 2342 The state in the result is the state for this image (in sync with this image): if 2343 AE state becomes CONVERGED, then the image data associated with this result should 2344 be good to use. 2345 2346 Below are state transition tables for different AE modes. 
2347 2348 State | Transition Cause | New State | Notes 2349 :------------:|:----------------:|:---------:|:-----------------------: 2350 INACTIVE | | INACTIVE | Camera device auto exposure algorithm is disabled 2351 2352 When android.control.aeMode is AE_MODE_ON*: 2353 2354 State | Transition Cause | New State | Notes 2355 :-------------:|:--------------------------------------------:|:--------------:|:-----------------: 2356 INACTIVE | Camera device initiates AE scan | SEARCHING | Values changing 2357 INACTIVE | android.control.aeLock is ON | LOCKED | Values locked 2358 SEARCHING | Camera device finishes AE scan | CONVERGED | Good values, not changing 2359 SEARCHING | Camera device finishes AE scan | FLASH_REQUIRED | Converged but too dark w/o flash 2360 SEARCHING | android.control.aeLock is ON | LOCKED | Values locked 2361 CONVERGED | Camera device initiates AE scan | SEARCHING | Values changing 2362 CONVERGED | android.control.aeLock is ON | LOCKED | Values locked 2363 FLASH_REQUIRED | Camera device initiates AE scan | SEARCHING | Values changing 2364 FLASH_REQUIRED | android.control.aeLock is ON | LOCKED | Values locked 2365 LOCKED | android.control.aeLock is OFF | SEARCHING | Values not good after unlock 2366 LOCKED | android.control.aeLock is OFF | CONVERGED | Values good after unlock 2367 LOCKED | android.control.aeLock is OFF | FLASH_REQUIRED | Exposure good, but too dark 2368 PRECAPTURE | Sequence done. android.control.aeLock is OFF | CONVERGED | Ready for high-quality capture 2369 PRECAPTURE | Sequence done. android.control.aeLock is ON | LOCKED | Ready for high-quality capture 2370 LOCKED | aeLock is ON and aePrecaptureTrigger is START | LOCKED | Precapture trigger is ignored when AE is already locked 2371 LOCKED | aeLock is ON and aePrecaptureTrigger is CANCEL| LOCKED | Precapture trigger is ignored when AE is already locked 2372 Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START | PRECAPTURE | Start AE precapture metering sequence 2373 Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL| INACTIVE | Currently active precapture metering sequence is canceled 2374 2375 If the camera device supports AE external flash mode (ON_EXTERNAL_FLASH is included in 2376 android.control.aeAvailableModes), android.control.aeState must be FLASH_REQUIRED after 2377 the camera device finishes AE scan and it's too dark without flash. 2378 2379 For the above table, the camera device may skip reporting any state changes that happen 2380 without application intervention (i.e. mode switch, trigger, locking). Any state that 2381 can be skipped in that manner is called a transient state. 2382 2383 For example, for above AE modes (AE_MODE_ON*), in addition to the state transitions 2384 listed in above table, it is also legal for the camera device to skip one or more 2385 transient states between two results. See below table for examples: 2386 2387 State | Transition Cause | New State | Notes 2388 :-------------:|:-----------------------------------------------------------:|:--------------:|:-----------------: 2389 INACTIVE | Camera device finished AE scan | CONVERGED | Values are already good, transient states are skipped by camera device. 2390 Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device. 
        Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | CONVERGED      | Converged after a precapture sequence, transient states are skipped by camera device.
        Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged    | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.
        Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged    | CONVERGED      | Converged after a precapture sequence is canceled, transient states are skipped by camera device.
        CONVERGED      | Camera device finished AE scan | FLASH_REQUIRED | Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.
        FLASH_REQUIRED | Camera device finished AE scan | CONVERGED      | Converged after a new scan, transient states are skipped by camera device.
        </details>
      </entry>
      <clone entry="android.control.afMode" kind="controls">
      </clone>
      <clone entry="android.control.afRegions" kind="controls">
      </clone>
      <clone entry="android.control.afTrigger" kind="controls">
      </clone>
      <entry name="afState" type="byte" visibility="public" enum="true"
             hwlevel="legacy">
        <enum>
          <value>INACTIVE
          <notes>AF is off or has not yet tried to scan/been asked
          to scan.

          When a camera device is opened, it starts in this
          state. This is a transient state, the camera device may
          skip reporting this state in capture
          result.</notes></value>
          <value>PASSIVE_SCAN
          <notes>AF is currently performing an AF scan initiated by the
          camera device in a continuous autofocus mode.

          Only used by CONTINUOUS_* AF modes. This is a transient
          state, the camera device may skip reporting this state in
          capture result.</notes></value>
          <value>PASSIVE_FOCUSED
          <notes>AF currently believes it is in focus, but may
          restart scanning at any time.

          Only used by CONTINUOUS_* AF modes. This is a transient
          state, the camera device may skip reporting this state in
          capture result.</notes></value>
          <value>ACTIVE_SCAN
          <notes>AF is performing an AF scan because it was
          triggered by AF trigger.

          Only used by AUTO or MACRO AF modes. This is a transient
          state, the camera device may skip reporting this state in
          capture result.</notes></value>
          <value>FOCUSED_LOCKED
          <notes>AF believes it is focused correctly and has locked
          focus.

          This state is reached only after an explicit START AF trigger has been
          sent (android.control.afTrigger), when good focus has been obtained.

          The lens will remain stationary until the AF mode (android.control.afMode) is changed or
          a new AF trigger is sent to the camera device (android.control.afTrigger).
          </notes></value>
          <value>NOT_FOCUSED_LOCKED
          <notes>AF has failed to focus successfully and has locked
          focus.

          This state is reached only after an explicit START AF trigger has been
          sent (android.control.afTrigger), when good focus cannot be obtained.

          The lens will remain stationary until the AF mode (android.control.afMode) is changed or
          a new AF trigger is sent to the camera device (android.control.afTrigger).
          </notes></value>
          <value>PASSIVE_UNFOCUSED
          <notes>AF finished a passive scan without finding focus,
          and may restart scanning at any time.
2459 2460 Only used by CONTINUOUS_* AF modes. This is a transient state, the camera 2461 device may skip reporting this state in capture result. 2462 2463 LEGACY camera devices do not support this state. When a passive 2464 scan has finished, it will always go to PASSIVE_FOCUSED. 2465 </notes></value> 2466 </enum> 2467 <description>Current state of auto-focus (AF) algorithm.</description> 2468 <details> 2469 Switching between or enabling AF modes (android.control.afMode) always 2470 resets the AF state to INACTIVE. Similarly, switching between android.control.mode, 2471 or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all 2472 the algorithm states to INACTIVE. 2473 2474 The camera device can do several state transitions between two results, if it is 2475 allowed by the state transition table. For example: INACTIVE may never actually be 2476 seen in a result. 2477 2478 The state in the result is the state for this image (in sync with this image): if 2479 AF state becomes FOCUSED, then the image data associated with this result should 2480 be sharp. 2481 2482 Below are state transition tables for different AF modes. 2483 2484 When android.control.afMode is AF_MODE_OFF or AF_MODE_EDOF: 2485 2486 State | Transition Cause | New State | Notes 2487 :------------:|:----------------:|:---------:|:-----------: 2488 INACTIVE | | INACTIVE | Never changes 2489 2490 When android.control.afMode is AF_MODE_AUTO or AF_MODE_MACRO: 2491 2492 State | Transition Cause | New State | Notes 2493 :-----------------:|:----------------:|:------------------:|:--------------: 2494 INACTIVE | AF_TRIGGER | ACTIVE_SCAN | Start AF sweep, Lens now moving 2495 ACTIVE_SCAN | AF sweep done | FOCUSED_LOCKED | Focused, Lens now locked 2496 ACTIVE_SCAN | AF sweep done | NOT_FOCUSED_LOCKED | Not focused, Lens now locked 2497 ACTIVE_SCAN | AF_CANCEL | INACTIVE | Cancel/reset AF, Lens now locked 2498 FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Cancel/reset AF 2499 FOCUSED_LOCKED | AF_TRIGGER | ACTIVE_SCAN | Start new sweep, Lens now moving 2500 NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Cancel/reset AF 2501 NOT_FOCUSED_LOCKED | AF_TRIGGER | ACTIVE_SCAN | Start new sweep, Lens now moving 2502 Any state | Mode change | INACTIVE | 2503 2504 For the above table, the camera device may skip reporting any state changes that happen 2505 without application intervention (i.e. mode switch, trigger, locking). Any state that 2506 can be skipped in that manner is called a transient state. 2507 2508 For example, for these AF modes (AF_MODE_AUTO and AF_MODE_MACRO), in addition to the 2509 state transitions listed in above table, it is also legal for the camera device to skip 2510 one or more transient states between two results. See below table for examples: 2511 2512 State | Transition Cause | New State | Notes 2513 :-----------------:|:----------------:|:------------------:|:--------------: 2514 INACTIVE | AF_TRIGGER | FOCUSED_LOCKED | Focus is already good or good after a scan, lens is now locked. 2515 INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | Focus failed after a scan, lens is now locked. 2516 FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | Focus is already good or good after a scan, lens is now locked. 2517 NOT_FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | Focus is good after a scan, lens is not locked. 
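
        The AUTO/MACRO flow in the tables above can be driven from an application roughly as
        follows. This is a hedged sketch: `session`, `previewBuilder`, and `handler` are assumed
        to exist, with the preview already running as a repeating request in AF_MODE_AUTO.

```java
// Sketch: fire an AF sweep and watch for a locked state in the results.
void triggerAutoFocus(CameraCaptureSession session,
        CaptureRequest.Builder previewBuilder, Handler handler)
        throws CameraAccessException {
    previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
            CameraMetadata.CONTROL_AF_TRIGGER_START);
    session.capture(previewBuilder.build(), new CameraCaptureSession.CaptureCallback() {
        @Override
        public void onCaptureCompleted(CameraCaptureSession s, CaptureRequest request,
                TotalCaptureResult result) {
            Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
            // Transient states may be skipped, or the locked state may only show up
            // in a later repeating-preview result; real code checks both places.
            if (afState != null
                    && (afState == CameraMetadata.CONTROL_AF_STATE_FOCUSED_LOCKED
                            || afState == CameraMetadata.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)) {
                // Sweep finished; the lens stays locked until AF_CANCEL or a mode change.
            }
        }
    }, handler);
    // Return the trigger to IDLE so later requests do not restart the sweep.
    previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
            CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
}
```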

        When android.control.afMode is AF_MODE_CONTINUOUS_VIDEO:

          State            | Transition Cause                    | New State          | Notes
        :-----------------:|:-----------------------------------:|:------------------:|:--------------:
        INACTIVE           | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
        INACTIVE           | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
        PASSIVE_SCAN       | Camera device completes current scan| PASSIVE_FOCUSED    | End AF scan, Lens now locked
        PASSIVE_SCAN       | Camera device fails current scan    | PASSIVE_UNFOCUSED  | End AF scan, Lens now locked
        PASSIVE_SCAN       | AF_TRIGGER                          | FOCUSED_LOCKED     | Immediate transition, if focus is good. Lens now locked
        PASSIVE_SCAN       | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | Immediate transition, if focus is bad. Lens now locked
        PASSIVE_SCAN       | AF_CANCEL                           | INACTIVE           | Reset lens position, Lens now locked
        PASSIVE_FOCUSED    | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
        PASSIVE_UNFOCUSED  | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
        PASSIVE_FOCUSED    | AF_TRIGGER                          | FOCUSED_LOCKED     | Immediate transition, lens now locked
        PASSIVE_UNFOCUSED  | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | Immediate transition, lens now locked
        FOCUSED_LOCKED     | AF_TRIGGER                          | FOCUSED_LOCKED     | No effect
        FOCUSED_LOCKED     | AF_CANCEL                           | INACTIVE           | Restart AF scan
        NOT_FOCUSED_LOCKED | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | No effect
        NOT_FOCUSED_LOCKED | AF_CANCEL                           | INACTIVE           | Restart AF scan

        When android.control.afMode is AF_MODE_CONTINUOUS_PICTURE:

          State            | Transition Cause                     | New State          | Notes
        :-----------------:|:------------------------------------:|:------------------:|:--------------:
        INACTIVE           | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
        INACTIVE           | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
        PASSIVE_SCAN       | Camera device completes current scan | PASSIVE_FOCUSED    | End AF scan, Lens now locked
        PASSIVE_SCAN       | Camera device fails current scan     | PASSIVE_UNFOCUSED  | End AF scan, Lens now locked
        PASSIVE_SCAN       | AF_TRIGGER                           | FOCUSED_LOCKED     | Eventual transition once the focus is good. Lens now locked
        PASSIVE_SCAN       | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | Eventual transition if cannot find focus. Lens now locked
        PASSIVE_SCAN       | AF_CANCEL                            | INACTIVE           | Reset lens position, Lens now locked
        PASSIVE_FOCUSED    | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
        PASSIVE_UNFOCUSED  | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
        PASSIVE_FOCUSED    | AF_TRIGGER                           | FOCUSED_LOCKED     | Immediate transition. Lens now locked
        PASSIVE_UNFOCUSED  | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | Immediate transition. Lens now locked
        FOCUSED_LOCKED     | AF_TRIGGER                           | FOCUSED_LOCKED     | No effect
        FOCUSED_LOCKED     | AF_CANCEL                            | INACTIVE           | Restart AF scan
        NOT_FOCUSED_LOCKED | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | No effect
        NOT_FOCUSED_LOCKED | AF_CANCEL                            | INACTIVE           | Restart AF scan

        When switching between AF_MODE_CONTINUOUS_* (CAF modes) and AF_MODE_AUTO/AF_MODE_MACRO
        (AUTO modes), the initial INACTIVE or PASSIVE_SCAN states may be skipped by the
        camera device. When a trigger is included in a mode switch request, the trigger
        will be evaluated in the context of the new mode in the request.
2564 See below table for examples: 2565 2566 State | Transition Cause | New State | Notes 2567 :-----------:|:--------------------------------------:|:----------------------------------------:|:--------------: 2568 any state | CAF-->AUTO mode switch | INACTIVE | Mode switch without trigger, initial state must be INACTIVE 2569 any state | CAF-->AUTO mode switch with AF_TRIGGER | trigger-reachable states from INACTIVE | Mode switch with trigger, INACTIVE is skipped 2570 any state | AUTO-->CAF mode switch | passively reachable states from INACTIVE | Mode switch without trigger, passive transient state is skipped 2571 </details> 2572 </entry> 2573 <entry name="afTriggerId" type="int32" visibility="system" deprecated="true"> 2574 <description>The ID sent with the latest 2575 CAMERA2_TRIGGER_AUTOFOCUS call</description> 2576 <deprecation_description> 2577 Removed in camera HAL v3 2578 </deprecation_description> 2579 <details>Must be 0 if no CAMERA2_TRIGGER_AUTOFOCUS trigger 2580 received yet by HAL. Always updated even if AF algorithm 2581 ignores the trigger</details> 2582 </entry> 2583 <clone entry="android.control.awbLock" kind="controls"> 2584 </clone> 2585 <clone entry="android.control.awbMode" kind="controls"> 2586 </clone> 2587 <clone entry="android.control.awbRegions" kind="controls"> 2588 </clone> 2589 <clone entry="android.control.captureIntent" kind="controls"> 2590 </clone> 2591 <entry name="awbState" type="byte" visibility="public" enum="true" 2592 hwlevel="limited"> 2593 <enum> 2594 <value>INACTIVE 2595 <notes>AWB is not in auto mode, or has not yet started metering. 2596 2597 When a camera device is opened, it starts in this 2598 state. This is a transient state, the camera device may 2599 skip reporting this state in capture 2600 result.</notes></value> 2601 <value>SEARCHING 2602 <notes>AWB doesn't yet have a good set of control 2603 values for the current scene. 2604 2605 This is a transient state, the camera device 2606 may skip reporting this state in capture result.</notes></value> 2607 <value>CONVERGED 2608 <notes>AWB has a good set of control values for the 2609 current scene.</notes></value> 2610 <value>LOCKED 2611 <notes>AWB has been locked. 2612 </notes></value> 2613 </enum> 2614 <description>Current state of auto-white balance (AWB) algorithm.</description> 2615 <details>Switching between or enabling AWB modes (android.control.awbMode) always 2616 resets the AWB state to INACTIVE. Similarly, switching between android.control.mode, 2617 or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all 2618 the algorithm states to INACTIVE. 2619 2620 The camera device can do several state transitions between two results, if it is 2621 allowed by the state transition table. So INACTIVE may never actually be seen in 2622 a result. 2623 2624 The state in the result is the state for this image (in sync with this image): if 2625 AWB state becomes CONVERGED, then the image data associated with this result should 2626 be good to use. 2627 2628 Below are state transition tables for different AWB modes. 
2629 2630 When `android.control.awbMode != AWB_MODE_AUTO`: 2631 2632 State | Transition Cause | New State | Notes 2633 :------------:|:----------------:|:---------:|:-----------------------: 2634 INACTIVE | |INACTIVE |Camera device auto white balance algorithm is disabled 2635 2636 When android.control.awbMode is AWB_MODE_AUTO: 2637 2638 State | Transition Cause | New State | Notes 2639 :-------------:|:--------------------------------:|:-------------:|:-----------------: 2640 INACTIVE | Camera device initiates AWB scan | SEARCHING | Values changing 2641 INACTIVE | android.control.awbLock is ON | LOCKED | Values locked 2642 SEARCHING | Camera device finishes AWB scan | CONVERGED | Good values, not changing 2643 SEARCHING | android.control.awbLock is ON | LOCKED | Values locked 2644 CONVERGED | Camera device initiates AWB scan | SEARCHING | Values changing 2645 CONVERGED | android.control.awbLock is ON | LOCKED | Values locked 2646 LOCKED | android.control.awbLock is OFF | SEARCHING | Values not good after unlock 2647 2648 For the above table, the camera device may skip reporting any state changes that happen 2649 without application intervention (i.e. mode switch, trigger, locking). Any state that 2650 can be skipped in that manner is called a transient state. 2651 2652 For example, for this AWB mode (AWB_MODE_AUTO), in addition to the state transitions 2653 listed in above table, it is also legal for the camera device to skip one or more 2654 transient states between two results. See below table for examples: 2655 2656 State | Transition Cause | New State | Notes 2657 :-------------:|:--------------------------------:|:-------------:|:-----------------: 2658 INACTIVE | Camera device finished AWB scan | CONVERGED | Values are already good, transient states are skipped by camera device. 2659 LOCKED | android.control.awbLock is OFF | CONVERGED | Values good after unlock, transient states are skipped by camera device. 2660 </details> 2661 </entry> 2662 <clone entry="android.control.effectMode" kind="controls"> 2663 </clone> 2664 <clone entry="android.control.mode" kind="controls"> 2665 </clone> 2666 <clone entry="android.control.sceneMode" kind="controls"> 2667 </clone> 2668 <clone entry="android.control.videoStabilizationMode" kind="controls"> 2669 </clone> 2670 </dynamic> 2671 <static> 2672 <entry name="availableHighSpeedVideoConfigurations" type="int32" visibility="hidden" 2673 container="array" typedef="highSpeedVideoConfiguration" hwlevel="limited"> 2674 <array> 2675 <size>5</size> 2676 <size>n</size> 2677 </array> 2678 <description> 2679 List of available high speed video size, fps range and max batch size configurations 2680 supported by the camera device, in the format of (width, height, fps_min, fps_max, batch_size_max). 2681 </description> 2682 <range> 2683 For each configuration, the fps_max &gt;= 120fps. 2684 </range> 2685 <details> 2686 When CONSTRAINED_HIGH_SPEED_VIDEO is supported in android.request.availableCapabilities, 2687 this metadata will list the supported high speed video size, fps range and max batch size 2688 configurations. All the sizes listed in this configuration will be a subset of the sizes 2689 reported by {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} 2690 for processed non-stalling formats. 2691 2692 For the high speed video use case, the application must 2693 select the video size and fps range from this metadata to configure the recording and 2694 preview streams and setup the recording requests. 
For example, if the application intends
2695 to do high speed recording, it can select the maximum size reported by this metadata to
2696 configure output streams. Once the size is selected, the application can filter this metadata
2697 by the selected size and get the supported fps ranges, and use these fps ranges to setup the
2698 recording requests. Note that for the use case of multiple output streams, the application
2699 must select one unique size from this metadata to use (e.g., preview and recording streams
2700 must have the same size). Otherwise, the high speed capture session creation will fail.
2701
2702 The min and max fps will be multiples of 30fps.
2703
2704 High speed video streaming places significant performance pressure on camera hardware;
2705 to achieve efficient high speed streaming, the camera device may have to aggregate
2706 multiple frames together and send them to the camera device for processing, where the request
2707 controls are the same for all the frames in this batch. Max batch size indicates
2708 the max possible number of frames the camera device will group together for this high
2709 speed stream configuration. This max batch size will be used to generate a high speed
2710 recording request list by
2711 {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
2712 The max batch size for each configuration will satisfy the below conditions:
2713
2714 * Each max batch size will be a divisor of its corresponding fps_max / 30. For example,
2715 if max_fps is 300, max batch size will only be 1, 2, 5, or 10.
2716 * The camera device may choose a smaller internal batch size for each configuration, but
2717 the actual batch size will be a divisor of the max batch size. For example, if the max batch
2718 size is 8, the actual batch size used by the camera device will only be 1, 2, 4, or 8.
2719 * The max batch size in each configuration entry must be no larger than 32.
2720
2721 The camera device doesn't have to support batch mode to achieve high speed video recording;
2722 in such a case, batch_size_max will be reported as 1 in each configuration entry.
2723
2724 The fps ranges in this configuration list can only be used to create requests
2725 that are submitted to a high speed camera capture session created by
2726 {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}.
2727 The fps ranges reported in this metadata must not be used to setup capture requests for
2728 a normal capture session, or it will cause a request error.
2729 </details>
2730 <hal_details>
2731 All the sizes listed in this configuration will be a subset of the sizes reported by
2732 android.scaler.availableStreamConfigurations for processed non-stalling output formats.
2733 Note that for all high speed video configurations, HAL must be able to support a minimum
2734 of two streams, though the application might choose to configure just one stream.
2735
2736 The HAL may support multiple sensor modes for high speed outputs, for example, 120fps
2737 sensor mode and 120fps recording, 240fps sensor mode for 240fps recording. The application
2738 usually starts preview first, then starts recording. To avoid, as much as possible, the
2739 stutter caused by a sensor mode switch when starting recording, the application may want to ensure
2740 the same sensor mode is used for preview and recording. Therefore, the HAL must advertise
2741 the variable fps range [30, fps_max] for each fixed fps range in this configuration list.
2742 For example, if the HAL advertises [120, 120] and [240, 240], the HAL must also advertise
2743 [30, 120] and [30, 240] for each configuration. In doing so, if the application intends to
2744 do 120fps recording, it can select [30, 120] to start preview, and [120, 120] to start
2745 recording. For these variable fps ranges, it's up to the HAL to decide the actual fps
2746 values that are suitable for smooth preview streaming. If the HAL sees different max_fps
2747 values that fall into different sensor modes in a sequence of requests, the HAL must
2748 switch the sensor mode as quickly as possible to minimize the stutter caused by the mode switch.
2749 </hal_details>
2750 <tag id="V1" />
2751 </entry>
2752 <entry name="aeLockAvailable" type="byte" visibility="public" enum="true"
2753 typedef="boolean" hwlevel="legacy">
2754 <enum>
2755 <value>FALSE</value>
2756 <value>TRUE</value>
2757 </enum>
2758 <description>Whether the camera device supports android.control.aeLock</description>
2759 <details>
2760 Devices with MANUAL_SENSOR capability or BURST_CAPTURE capability will always
2761 list `true`. This includes FULL devices.
2762 </details>
2763 <tag id="BC"/>
2764 </entry>
2765 <entry name="awbLockAvailable" type="byte" visibility="public" enum="true"
2766 typedef="boolean" hwlevel="legacy">
2767 <enum>
2768 <value>FALSE</value>
2769 <value>TRUE</value>
2770 </enum>
2771 <description>Whether the camera device supports android.control.awbLock</description>
2772 <details>
2773 Devices with MANUAL_POST_PROCESSING capability or BURST_CAPTURE capability will
2774 always list `true`. This includes FULL devices.
2775 </details>
2776 <tag id="BC"/>
2777 </entry>
2778 <entry name="availableModes" type="byte" visibility="public"
2779 type_notes="List of enums (android.control.mode)." container="array"
2780 typedef="enumList" hwlevel="legacy">
2781 <array>
2782 <size>n</size>
2783 </array>
2784 <description>
2785 List of control modes for android.control.mode that are supported by this camera
2786 device.
2787 </description>
2788 <range>Any value listed in android.control.mode</range>
2789 <details>
2790 This list contains control modes that can be set for the camera device.
2791 LEGACY mode devices will always support AUTO mode. LIMITED and FULL
2792 devices will always support OFF and AUTO modes.
2793 </details>
2794 </entry>
2795 <entry name="postRawSensitivityBoostRange" type="int32" visibility="public"
2796 type_notes="Range of supported post RAW sensitivity boosts"
2797 container="array" typedef="rangeInt">
2798 <array>
2799 <size>2</size>
2800 </array>
2801 <description>Range of boosts for android.control.postRawSensitivityBoost supported
2802 by this camera device.
2803 </description>
2804 <units>ISO arithmetic units, the same as android.sensor.sensitivity</units>
2805 <details>
2806 Devices that support post RAW sensitivity boost will advertise the
2807 android.control.postRawSensitivityBoost key for controlling
2808 post RAW sensitivity boost.
2809
2810 This key will be `null` for devices that do not support any RAW format
2811 outputs. For devices that do support RAW format outputs, this key will always
2812 be present, and if a device does not support post RAW sensitivity boost, it will
2813 list `(100, 100)` in this key.
2814 </details>
2815 <hal_details>
2816 This key is added in legacy HAL3.4. For legacy HAL3.3 or earlier devices, the camera
2817 framework will generate this key as `(100, 100)` if the device supports any of the RAW output
2818 formats.
All legacy HAL3.4 and above devices should list this key if device supports 2819 any of RAW output formats. 2820 </hal_details> 2821 </entry> 2822 </static> 2823 <controls> 2824 <entry name="postRawSensitivityBoost" type="int32" visibility="public"> 2825 <description>The amount of additional sensitivity boost applied to output images 2826 after RAW sensor data is captured. 2827 </description> 2828 <units>ISO arithmetic units, the same as android.sensor.sensitivity</units> 2829 <range>android.control.postRawSensitivityBoostRange</range> 2830 <details> 2831 Some camera devices support additional digital sensitivity boosting in the 2832 camera processing pipeline after sensor RAW image is captured. 2833 Such a boost will be applied to YUV/JPEG format output images but will not 2834 have effect on RAW output formats like RAW_SENSOR, RAW10, RAW12 or RAW_OPAQUE. 2835 2836 This key will be `null` for devices that do not support any RAW format 2837 outputs. For devices that do support RAW format outputs, this key will always 2838 present, and if a device does not support post RAW sensitivity boost, it will 2839 list `100` in this key. 2840 2841 If the camera device cannot apply the exact boost requested, it will reduce the 2842 boost to the nearest supported value. 2843 The final boost value used will be available in the output capture result. 2844 2845 For devices that support post RAW sensitivity boost, the YUV/JPEG output images 2846 of such device will have the total sensitivity of 2847 `android.sensor.sensitivity * android.control.postRawSensitivityBoost / 100` 2848 The sensitivity of RAW format images will always be `android.sensor.sensitivity` 2849 2850 This control is only effective if android.control.aeMode or android.control.mode is set to 2851 OFF; otherwise the auto-exposure algorithm will override this value. 2852 </details> 2853 </entry> 2854 </controls> 2855 <dynamic> 2856 <clone entry="android.control.postRawSensitivityBoost" kind="controls"> 2857 </clone> 2858 </dynamic> 2859 <controls> 2860 <entry name="enableZsl" type="byte" visibility="public" enum="true" typedef="boolean"> 2861 <enum> 2862 <value>FALSE 2863 <notes>Requests with android.control.captureIntent == STILL_CAPTURE must be captured 2864 after previous requests.</notes></value> 2865 <value>TRUE 2866 <notes>Requests with android.control.captureIntent == STILL_CAPTURE may or may not be 2867 captured before previous requests.</notes></value> 2868 </enum> 2869 <description>Allow camera device to enable zero-shutter-lag mode for requests with 2870 android.control.captureIntent == STILL_CAPTURE. 2871 </description> 2872 <details> 2873 If enableZsl is `true`, the camera device may enable zero-shutter-lag mode for requests with 2874 STILL_CAPTURE capture intent. The camera device may use images captured in the past to 2875 produce output images for a zero-shutter-lag request. The result metadata including the 2876 android.sensor.timestamp reflects the source frames used to produce output images. 2877 Therefore, the contents of the output images and the result metadata may be out of order 2878 compared to previous regular requests. enableZsl does not affect requests with other 2879 capture intents. 
2880 2881 For example, when requests are submitted in the following order: 2882 Request A: enableZsl is ON, android.control.captureIntent is PREVIEW 2883 Request B: enableZsl is ON, android.control.captureIntent is STILL_CAPTURE 2884 2885 The output images for request B may have contents captured before the output images for 2886 request A, and the result metadata for request B may be older than the result metadata for 2887 request A. 2888 2889 Note that when enableZsl is `true`, it is not guaranteed to get output images captured in 2890 the past for requests with STILL_CAPTURE capture intent. 2891 2892 For applications targeting SDK versions O and newer, the value of enableZsl in 2893 TEMPLATE_STILL_CAPTURE template may be `true`. The value in other templates is always 2894 `false` if present. 2895 2896 For applications targeting SDK versions older than O, the value of enableZsl in all 2897 capture templates is always `false` if present. 2898 2899 For application-operated ZSL, use CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template. 2900 </details> 2901 <hal_details> 2902 It is valid for HAL to produce regular output images for requests with STILL_CAPTURE 2903 capture intent. 2904 </hal_details> 2905 </entry> 2906 </controls> 2907 <dynamic> 2908 <clone entry="android.control.enableZsl" kind="controls"> 2909 </clone> 2910 <entry name="afSceneChange" type="byte" visibility="public" enum="true" hal_version="3.3"> 2911 <enum> 2912 <value>NOT_DETECTED 2913 <notes>Scene change is not detected within the AF region(s).</notes></value> 2914 <value>DETECTED 2915 <notes>Scene change is detected within the AF region(s).</notes></value> 2916 </enum> 2917 <description>Whether a significant scene change is detected within the currently-set AF 2918 region(s).</description> 2919 <details>When the camera focus routine detects a change in the scene it is looking at, 2920 such as a large shift in camera viewpoint, significant motion in the scene, or a 2921 significant illumination change, this value will be set to DETECTED for a single capture 2922 result. Otherwise the value will be NOT_DETECTED. The threshold for detection is similar 2923 to what would trigger a new passive focus scan to begin in CONTINUOUS autofocus modes. 2924 2925 This key will be available if the camera device advertises this key via {@link 2926 android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys|ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}. 2927 </details> 2928 </entry> 2929 </dynamic> 2930 </section> 2931 <section name="demosaic"> 2932 <controls> 2933 <entry name="mode" type="byte" enum="true"> 2934 <enum> 2935 <value>FAST 2936 <notes>Minimal or no slowdown of frame rate compared to 2937 Bayer RAW output.</notes></value> 2938 <value>HIGH_QUALITY 2939 <notes>Improved processing quality but the frame rate might be slowed down 2940 relative to raw output.</notes></value> 2941 </enum> 2942 <description>Controls the quality of the demosaicing 2943 processing.</description> 2944 <tag id="FUTURE" /> 2945 </entry> 2946 </controls> 2947 </section> 2948 <section name="edge"> 2949 <controls> 2950 <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full"> 2951 <enum> 2952 <value>OFF 2953 <notes>No edge enhancement is applied.</notes></value> 2954 <value>FAST 2955 <notes>Apply edge enhancement at a quality level that does not slow down frame rate 2956 relative to sensor output. 
It may be the same as OFF if edge enhancement will 2957 slow down frame rate relative to sensor.</notes></value> 2958 <value>HIGH_QUALITY 2959 <notes>Apply high-quality edge enhancement, at a cost of possibly reduced output frame rate. 2960 </notes></value> 2961 <value optional="true">ZERO_SHUTTER_LAG <notes>Edge enhancement is applied at different 2962 levels for different output streams, based on resolution. Streams at maximum recording 2963 resolution (see {@link 2964 android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession}) 2965 or below have edge enhancement applied, while higher-resolution streams have no edge 2966 enhancement applied. The level of edge enhancement for low-resolution streams is tuned 2967 so that frame rate is not impacted, and the quality is equal to or better than FAST 2968 (since it is only applied to lower-resolution outputs, quality may improve from FAST). 2969 2970 This mode is intended to be used by applications operating in a zero-shutter-lag mode 2971 with YUV or PRIVATE reprocessing, where the application continuously captures 2972 high-resolution intermediate buffers into a circular buffer, from which a final image is 2973 produced via reprocessing when a user takes a picture. For such a use case, the 2974 high-resolution buffers must not have edge enhancement applied to maximize efficiency of 2975 preview and to avoid double-applying enhancement when reprocessed, while low-resolution 2976 buffers (used for recording or preview, generally) need edge enhancement applied for 2977 reasonable preview quality. 2978 2979 This mode is guaranteed to be supported by devices that support either the 2980 YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities 2981 (android.request.availableCapabilities lists either of those capabilities) and it will 2982 be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template. 2983 </notes></value> 2984 </enum> 2985 <description>Operation mode for edge 2986 enhancement.</description> 2987 <range>android.edge.availableEdgeModes</range> 2988 <details>Edge enhancement improves sharpness and details in the captured image. OFF means 2989 no enhancement will be applied by the camera device. 2990 2991 FAST/HIGH_QUALITY both mean camera device determined enhancement 2992 will be applied. HIGH_QUALITY mode indicates that the 2993 camera device will use the highest-quality enhancement algorithms, 2994 even if it slows down capture rate. FAST means the camera device will 2995 not slow down capture rate when applying edge enhancement. FAST may be the same as OFF if 2996 edge enhancement will slow down capture rate. Every output stream will have a similar 2997 amount of enhancement applied. 2998 2999 ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular 3000 buffer of high-resolution images during preview and reprocess image(s) from that buffer 3001 into a final capture when triggered by the user. In this mode, the camera device applies 3002 edge enhancement to low-resolution streams (below maximum recording resolution) to 3003 maximize preview quality, but does not apply edge enhancement to high-resolution streams, 3004 since those will be reprocessed later if necessary. 3005 3006 For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera 3007 device will apply FAST/HIGH_QUALITY YUV-domain edge enhancement, respectively. 
3008 The camera device may adjust its internal edge enhancement parameters for best 3009 image quality based on the android.reprocess.effectiveExposureFactor, if it is set. 3010 </details> 3011 <hal_details> 3012 For YUV_REPROCESSING The HAL can use android.reprocess.effectiveExposureFactor to 3013 adjust the internal edge enhancement reduction parameters appropriately to get the best 3014 quality images. 3015 </hal_details> 3016 <tag id="V1" /> 3017 <tag id="REPROC" /> 3018 </entry> 3019 <entry name="strength" type="byte"> 3020 <description>Control the amount of edge enhancement 3021 applied to the images</description> 3022 <units>1-10; 10 is maximum sharpening</units> 3023 <tag id="FUTURE" /> 3024 </entry> 3025 </controls> 3026 <static> 3027 <entry name="availableEdgeModes" type="byte" visibility="public" 3028 type_notes="list of enums" container="array" typedef="enumList" 3029 hwlevel="full"> 3030 <array> 3031 <size>n</size> 3032 </array> 3033 <description> 3034 List of edge enhancement modes for android.edge.mode that are supported by this camera 3035 device. 3036 </description> 3037 <range>Any value listed in android.edge.mode</range> 3038 <details> 3039 Full-capability camera devices must always support OFF; camera devices that support 3040 YUV_REPROCESSING or PRIVATE_REPROCESSING will list ZERO_SHUTTER_LAG; all devices will 3041 list FAST. 3042 </details> 3043 <hal_details> 3044 HAL must support both FAST and HIGH_QUALITY if edge enhancement control is available 3045 on the camera device, but the underlying implementation can be the same for both modes. 3046 That is, if the highest quality implementation on the camera device does not slow down 3047 capture rate, then FAST and HIGH_QUALITY will generate the same output. 3048 </hal_details> 3049 <tag id="V1" /> 3050 <tag id="REPROC" /> 3051 </entry> 3052 </static> 3053 <dynamic> 3054 <clone entry="android.edge.mode" kind="controls"> 3055 <tag id="V1" /> 3056 <tag id="REPROC" /> 3057 </clone> 3058 </dynamic> 3059 </section> 3060 <section name="flash"> 3061 <controls> 3062 <entry name="firingPower" type="byte"> 3063 <description>Power for flash firing/torch</description> 3064 <units>10 is max power; 0 is no flash. Linear</units> 3065 <range>0 - 10</range> 3066 <details>Power for snapshot may use a different scale than 3067 for torch mode. Only one entry for torch mode will be 3068 used</details> 3069 <tag id="FUTURE" /> 3070 </entry> 3071 <entry name="firingTime" type="int64"> 3072 <description>Firing time of flash relative to start of 3073 exposure</description> 3074 <units>nanoseconds</units> 3075 <range>0-(exposure time-flash duration)</range> 3076 <details>Clamped to (0, exposure time - flash 3077 duration).</details> 3078 <tag id="FUTURE" /> 3079 </entry> 3080 <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="legacy"> 3081 <enum> 3082 <value>OFF 3083 <notes> 3084 Do not fire the flash for this capture. 3085 </notes> 3086 </value> 3087 <value>SINGLE 3088 <notes> 3089 If the flash is available and charged, fire flash 3090 for this capture. 3091 </notes> 3092 </value> 3093 <value>TORCH 3094 <notes> 3095 Transition flash to continuously on. 3096 </notes> 3097 </value> 3098 </enum> 3099 <description>The desired mode for for the camera device's flash control.</description> 3100 <details> 3101 This control is only effective when flash unit is available 3102 (`android.flash.info.available == true`). 3103 3104 When this control is used, the android.control.aeMode must be set to ON or OFF. 
3105 Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH, 3106 ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control. 3107 3108 When set to OFF, the camera device will not fire flash for this capture. 3109 3110 When set to SINGLE, the camera device will fire flash regardless of the camera 3111 device's auto-exposure routine's result. When used in still capture case, this 3112 control should be used along with auto-exposure (AE) precapture metering sequence 3113 (android.control.aePrecaptureTrigger), otherwise, the image may be incorrectly exposed. 3114 3115 When set to TORCH, the flash will be on continuously. This mode can be used 3116 for use cases such as preview, auto-focus assist, still capture, or video recording. 3117 3118 The flash status will be reported by android.flash.state in the capture result metadata. 3119 </details> 3120 <tag id="BC" /> 3121 </entry> 3122 </controls> 3123 <static> 3124 <namespace name="info"> 3125 <entry name="available" type="byte" visibility="public" enum="true" 3126 typedef="boolean" hwlevel="legacy"> 3127 <enum> 3128 <value>FALSE</value> 3129 <value>TRUE</value> 3130 </enum> 3131 <description>Whether this camera device has a 3132 flash unit.</description> 3133 <details> 3134 Will be `false` if no flash is available. 3135 3136 If there is no flash unit, none of the flash controls do 3137 anything.</details> 3138 <tag id="BC" /> 3139 </entry> 3140 <entry name="chargeDuration" type="int64"> 3141 <description>Time taken before flash can fire 3142 again</description> 3143 <units>nanoseconds</units> 3144 <range>0-1e9</range> 3145 <details>1 second too long/too short for recharge? Should 3146 this be power-dependent?</details> 3147 <tag id="FUTURE" /> 3148 </entry> 3149 </namespace> 3150 <entry name="colorTemperature" type="byte"> 3151 <description>The x,y whitepoint of the 3152 flash</description> 3153 <units>pair of floats</units> 3154 <range>0-1 for both</range> 3155 <tag id="FUTURE" /> 3156 </entry> 3157 <entry name="maxEnergy" type="byte"> 3158 <description>Max energy output of the flash for a full 3159 power single flash</description> 3160 <units>lumen-seconds</units> 3161 <range>&gt;= 0</range> 3162 <tag id="FUTURE" /> 3163 </entry> 3164 </static> 3165 <dynamic> 3166 <clone entry="android.flash.firingPower" kind="controls"> 3167 </clone> 3168 <clone entry="android.flash.firingTime" kind="controls"> 3169 </clone> 3170 <clone entry="android.flash.mode" kind="controls"></clone> 3171 <entry name="state" type="byte" visibility="public" enum="true" 3172 hwlevel="limited"> 3173 <enum> 3174 <value>UNAVAILABLE 3175 <notes>No flash on camera.</notes></value> 3176 <value>CHARGING 3177 <notes>Flash is charging and cannot be fired.</notes></value> 3178 <value>READY 3179 <notes>Flash is ready to fire.</notes></value> 3180 <value>FIRED 3181 <notes>Flash fired for this capture.</notes></value> 3182 <value>PARTIAL 3183 <notes>Flash partially illuminated this frame. 3184 3185 This is usually due to the next or previous frame having 3186 the flash fire, and the flash spilling into this capture 3187 due to hardware limitations.</notes></value> 3188 </enum> 3189 <description>Current state of the flash 3190 unit.</description> 3191 <details> 3192 When the camera device doesn't have flash unit 3193 (i.e. `android.flash.info.available == false`), this state will always be UNAVAILABLE. 3194 Other states indicate the current flash status. 
3195 3196 In certain conditions, this will be available on LEGACY devices: 3197 3198 * Flash-less cameras always return UNAVAILABLE. 3199 * Using android.control.aeMode `==` ON_ALWAYS_FLASH 3200 will always return FIRED. 3201 * Using android.flash.mode `==` TORCH 3202 will always return FIRED. 3203 3204 In all other conditions the state will not be available on 3205 LEGACY devices (i.e. it will be `null`). 3206 </details> 3207 </entry> 3208 </dynamic> 3209 </section> 3210 <section name="hotPixel"> 3211 <controls> 3212 <entry name="mode" type="byte" visibility="public" enum="true"> 3213 <enum> 3214 <value>OFF 3215 <notes> 3216 No hot pixel correction is applied. 3217 3218 The frame rate must not be reduced relative to sensor raw output 3219 for this option. 3220 3221 The hotpixel map may be returned in android.statistics.hotPixelMap. 3222 </notes> 3223 </value> 3224 <value>FAST 3225 <notes> 3226 Hot pixel correction is applied, without reducing frame 3227 rate relative to sensor raw output. 3228 3229 The hotpixel map may be returned in android.statistics.hotPixelMap. 3230 </notes> 3231 </value> 3232 <value>HIGH_QUALITY 3233 <notes> 3234 High-quality hot pixel correction is applied, at a cost 3235 of possibly reduced frame rate relative to sensor raw output. 3236 3237 The hotpixel map may be returned in android.statistics.hotPixelMap. 3238 </notes> 3239 </value> 3240 </enum> 3241 <description> 3242 Operational mode for hot pixel correction. 3243 </description> 3244 <range>android.hotPixel.availableHotPixelModes</range> 3245 <details> 3246 Hotpixel correction interpolates out, or otherwise removes, pixels 3247 that do not accurately measure the incoming light (i.e. pixels that 3248 are stuck at an arbitrary value or are oversensitive). 3249 </details> 3250 <tag id="V1" /> 3251 <tag id="RAW" /> 3252 </entry> 3253 </controls> 3254 <static> 3255 <entry name="availableHotPixelModes" type="byte" visibility="public" 3256 type_notes="list of enums" container="array" typedef="enumList"> 3257 <array> 3258 <size>n</size> 3259 </array> 3260 <description> 3261 List of hot pixel correction modes for android.hotPixel.mode that are supported by this 3262 camera device. 3263 </description> 3264 <range>Any value listed in android.hotPixel.mode</range> 3265 <details> 3266 FULL mode camera devices will always support FAST. 3267 </details> 3268 <hal_details> 3269 To avoid performance issues, there will be significantly fewer hot 3270 pixels than actual pixels on the camera sensor. 3271 HAL must support both FAST and HIGH_QUALITY if hot pixel correction control is available 3272 on the camera device, but the underlying implementation can be the same for both modes. 3273 That is, if the highest quality implementation on the camera device does not slow down 3274 capture rate, then FAST and HIGH_QUALITY will generate the same output. 3275 </hal_details> 3276 <tag id="V1" /> 3277 <tag id="RAW" /> 3278 </entry> 3279 </static> 3280 <dynamic> 3281 <clone entry="android.hotPixel.mode" kind="controls"> 3282 <tag id="V1" /> 3283 <tag id="RAW" /> 3284 </clone> 3285 </dynamic> 3286 </section> 3287 <section name="jpeg"> 3288 <controls> 3289 <entry name="gpsLocation" type="byte" visibility="java_public" synthetic="true" 3290 typedef="location" hwlevel="legacy"> 3291 <description> 3292 A location object to use when generating image GPS metadata. 3293 </description> 3294 <details> 3295 Setting a location object in a request will include the GPS coordinates of the location 3296 into any JPEG images captured based on the request. 
These coordinates can then be 3297 viewed by anyone who receives the JPEG image. 3298 </details> 3299 </entry> 3300 <entry name="gpsCoordinates" type="double" visibility="ndk_public" 3301 type_notes="latitude, longitude, altitude. First two in degrees, the third in meters" 3302 container="array" hwlevel="legacy"> 3303 <array> 3304 <size>3</size> 3305 </array> 3306 <description>GPS coordinates to include in output JPEG 3307 EXIF.</description> 3308 <range>(-180 - 180], [-90,90], [-inf, inf]</range> 3309 <tag id="BC" /> 3310 </entry> 3311 <entry name="gpsProcessingMethod" type="byte" visibility="ndk_public" 3312 typedef="string" hwlevel="legacy"> 3313 <description>32 characters describing GPS algorithm to 3314 include in EXIF.</description> 3315 <units>UTF-8 null-terminated string</units> 3316 <tag id="BC" /> 3317 </entry> 3318 <entry name="gpsTimestamp" type="int64" visibility="ndk_public" hwlevel="legacy"> 3319 <description>Time GPS fix was made to include in 3320 EXIF.</description> 3321 <units>UTC in seconds since January 1, 1970</units> 3322 <tag id="BC" /> 3323 </entry> 3324 <entry name="orientation" type="int32" visibility="public" hwlevel="legacy"> 3325 <description>The orientation for a JPEG image.</description> 3326 <units>Degrees in multiples of 90</units> 3327 <range>0, 90, 180, 270</range> 3328 <details> 3329 The clockwise rotation angle in degrees, relative to the orientation 3330 to the camera, that the JPEG picture needs to be rotated by, to be viewed 3331 upright. 3332 3333 Camera devices may either encode this value into the JPEG EXIF header, or 3334 rotate the image data to match this orientation. When the image data is rotated, 3335 the thumbnail data will also be rotated. 3336 3337 Note that this orientation is relative to the orientation of the camera sensor, given 3338 by android.sensor.orientation. 3339 3340 To translate from the device orientation given by the Android sensor APIs for camera 3341 sensors which are not EXTERNAL, the following sample code may be used: 3342 3343 private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) { 3344 if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0; 3345 int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION); 3346 3347 // Round device orientation to a multiple of 90 3348 deviceOrientation = (deviceOrientation + 45) / 90 * 90; 3349 3350 // Reverse device orientation for front-facing cameras 3351 boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT; 3352 if (facingFront) deviceOrientation = -deviceOrientation; 3353 3354 // Calculate desired JPEG orientation relative to camera orientation to make 3355 // the image upright relative to the device orientation 3356 int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360; 3357 3358 return jpegOrientation; 3359 } 3360 3361 For EXTERNAL cameras the sensor orientation will always be set to 0 and the facing will 3362 also be set to EXTERNAL. The above code is not relevant in such case. 
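As a usage sketch only (assuming hypothetical `captureBuilder`, `characteristics`, and
`deviceOrientation` values from the application's capture path), the value computed by
the helper above would typically be applied to a still-capture request:

    // Hypothetical usage of getJpegOrientation() defined above.
    captureBuilder.set(CaptureRequest.JPEG_ORIENTATION,
            getJpegOrientation(characteristics, deviceOrientation));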
3363 </details>
3364 <tag id="BC" />
3365 </entry>
3366 <entry name="quality" type="byte" visibility="public" hwlevel="legacy">
3367 <description>Compression quality of the final JPEG
3368 image.</description>
3369 <range>1-100; larger is higher quality</range>
3370 <details>85-95 is typical usage range.</details>
3371 <tag id="BC" />
3372 </entry>
3373 <entry name="thumbnailQuality" type="byte" visibility="public" hwlevel="legacy">
3374 <description>Compression quality of JPEG
3375 thumbnail.</description>
3376 <range>1-100; larger is higher quality</range>
3377 <tag id="BC" />
3378 </entry>
3379 <entry name="thumbnailSize" type="int32" visibility="public"
3380 container="array" typedef="size" hwlevel="legacy">
3381 <array>
3382 <size>2</size>
3383 </array>
3384 <description>Resolution of embedded JPEG thumbnail.</description>
3385 <range>android.jpeg.availableThumbnailSizes</range>
3386 <details>When set to (0, 0), the JPEG EXIF will not contain a thumbnail,
3387 but the captured JPEG will still be a valid image.
3388
3389 For best results, when issuing a request for a JPEG image, the thumbnail size selected
3390 should have the same aspect ratio as the main JPEG output.
3391
3392 If the thumbnail image aspect ratio differs from the JPEG primary image aspect
3393 ratio, the camera device creates the thumbnail by cropping it from the primary image.
3394 For example, if the primary image has a 4:3 aspect ratio and the thumbnail image has a
3395 16:9 aspect ratio, the primary image will be cropped vertically (letterbox) to
3396 generate the thumbnail image. The thumbnail image will always have a smaller Field
3397 Of View (FOV) than the primary image when aspect ratios differ.
3398
3399 When a non-zero android.jpeg.orientation is requested,
3400 the camera device will handle thumbnail rotation in one of the following ways:
3401
3402 * Set the {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}
3403 and keep jpeg and thumbnail image data unrotated.
3404 * Rotate the jpeg and thumbnail image data and not set
3405 {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}. In this
3406 case, LIMITED or FULL hardware level devices will report the rotated thumbnail size in
3407 the capture result, so the width and height will be interchanged if a 90 or 270 degree
3408 orientation is requested. LEGACY devices will always report the unrotated thumbnail
3409 size.
3410 </details>
3411 <hal_details>
3412 The HAL must not squeeze or stretch the downscaled primary image to generate the thumbnail.
3413 The cropping must be done on the primary jpeg image rather than the sensor active array.
3414 The stream cropping rule specified by "S5. Cropping" in camera3.h doesn't apply to the
3415 thumbnail image cropping.
3416 </hal_details>
3417 <tag id="BC" />
3418 </entry>
3419 </controls>
3420 <static>
3421 <entry name="availableThumbnailSizes" type="int32" visibility="public"
3422 container="array" typedef="size" hwlevel="legacy">
3423 <array>
3424 <size>2</size>
3425 <size>n</size>
3426 </array>
3427 <description>List of JPEG thumbnail sizes for android.jpeg.thumbnailSize supported by this
3428 camera device.</description>
3429 <details>
3430 This list will include at least one non-zero resolution, plus `(0,0)` for indicating no
3431 thumbnail should be generated.
3432
3433 The following conditions will be satisfied for this size list:
3434
3435 * The sizes will be sorted by increasing pixel area (width x height).
3436 If several resolutions have the same area, they will be sorted by increasing width. 3437 * The aspect ratio of the largest thumbnail size will be same as the 3438 aspect ratio of largest JPEG output size in android.scaler.availableStreamConfigurations. 3439 The largest size is defined as the size that has the largest pixel area 3440 in a given size list. 3441 * Each output JPEG size in android.scaler.availableStreamConfigurations will have at least 3442 one corresponding size that has the same aspect ratio in availableThumbnailSizes, 3443 and vice versa. 3444 * All non-`(0, 0)` sizes will have non-zero widths and heights.</details> 3445 <tag id="BC" /> 3446 </entry> 3447 <entry name="maxSize" type="int32" visibility="system"> 3448 <description>Maximum size in bytes for the compressed 3449 JPEG buffer</description> 3450 <range>Must be large enough to fit any JPEG produced by 3451 the camera</range> 3452 <details>This is used for sizing the gralloc buffers for 3453 JPEG</details> 3454 </entry> 3455 </static> 3456 <dynamic> 3457 <clone entry="android.jpeg.gpsLocation" kind="controls"> 3458 </clone> 3459 <clone entry="android.jpeg.gpsCoordinates" kind="controls"> 3460 </clone> 3461 <clone entry="android.jpeg.gpsProcessingMethod" 3462 kind="controls"></clone> 3463 <clone entry="android.jpeg.gpsTimestamp" kind="controls"> 3464 </clone> 3465 <clone entry="android.jpeg.orientation" kind="controls"> 3466 </clone> 3467 <clone entry="android.jpeg.quality" kind="controls"> 3468 </clone> 3469 <entry name="size" type="int32"> 3470 <description>The size of the compressed JPEG image, in 3471 bytes</description> 3472 <range>&gt;= 0</range> 3473 <details>If no JPEG output is produced for the request, 3474 this must be 0. 3475 3476 Otherwise, this describes the real size of the compressed 3477 JPEG image placed in the output stream. More specifically, 3478 if android.jpeg.maxSize = 1000000, and a specific capture 3479 has android.jpeg.size = 500000, then the output buffer from 3480 the JPEG stream will be 1000000 bytes, of which the first 3481 500000 make up the real data.</details> 3482 <tag id="FUTURE" /> 3483 </entry> 3484 <clone entry="android.jpeg.thumbnailQuality" 3485 kind="controls"></clone> 3486 <clone entry="android.jpeg.thumbnailSize" kind="controls"> 3487 </clone> 3488 </dynamic> 3489 </section> 3490 <section name="lens"> 3491 <controls> 3492 <entry name="aperture" type="float" visibility="public" hwlevel="full"> 3493 <description>The desired lens aperture size, as a ratio of lens focal length to the 3494 effective aperture diameter.</description> 3495 <units>The f-number (f/N)</units> 3496 <range>android.lens.info.availableApertures</range> 3497 <details>Setting this value is only supported on the camera devices that have a variable 3498 aperture lens. 3499 3500 When this is supported and android.control.aeMode is OFF, 3501 this can be set along with android.sensor.exposureTime, 3502 android.sensor.sensitivity, and android.sensor.frameDuration 3503 to achieve manual exposure control. 3504 3505 The requested aperture value may take several frames to reach the 3506 requested value; the camera device will report the current (intermediate) 3507 aperture size in capture result metadata while the aperture is changing. 3508 While the aperture is still changing, android.lens.state will be set to MOVING. 
3509 3510 When this is supported and android.control.aeMode is one of 3511 the ON modes, this will be overridden by the camera device 3512 auto-exposure algorithm, the overridden values are then provided 3513 back to the user in the corresponding result.</details> 3514 <tag id="V1" /> 3515 </entry> 3516 <entry name="filterDensity" type="float" visibility="public" hwlevel="full"> 3517 <description> 3518 The desired setting for the lens neutral density filter(s). 3519 </description> 3520 <units>Exposure Value (EV)</units> 3521 <range>android.lens.info.availableFilterDensities</range> 3522 <details> 3523 This control will not be supported on most camera devices. 3524 3525 Lens filters are typically used to lower the amount of light the 3526 sensor is exposed to (measured in steps of EV). As used here, an EV 3527 step is the standard logarithmic representation, which are 3528 non-negative, and inversely proportional to the amount of light 3529 hitting the sensor. For example, setting this to 0 would result 3530 in no reduction of the incoming light, and setting this to 2 would 3531 mean that the filter is set to reduce incoming light by two stops 3532 (allowing 1/4 of the prior amount of light to the sensor). 3533 3534 It may take several frames before the lens filter density changes 3535 to the requested value. While the filter density is still changing, 3536 android.lens.state will be set to MOVING. 3537 </details> 3538 <tag id="V1" /> 3539 </entry> 3540 <entry name="focalLength" type="float" visibility="public" hwlevel="legacy"> 3541 <description> 3542 The desired lens focal length; used for optical zoom. 3543 </description> 3544 <units>Millimeters</units> 3545 <range>android.lens.info.availableFocalLengths</range> 3546 <details> 3547 This setting controls the physical focal length of the camera 3548 device's lens. Changing the focal length changes the field of 3549 view of the camera device, and is usually used for optical zoom. 3550 3551 Like android.lens.focusDistance and android.lens.aperture, this 3552 setting won't be applied instantaneously, and it may take several 3553 frames before the lens can change to the requested focal length. 3554 While the focal length is still changing, android.lens.state will 3555 be set to MOVING. 3556 3557 Optical zoom will not be supported on most devices. 3558 </details> 3559 <tag id="V1" /> 3560 </entry> 3561 <entry name="focusDistance" type="float" visibility="public" hwlevel="full"> 3562 <description>Desired distance to plane of sharpest focus, 3563 measured from frontmost surface of the lens.</description> 3564 <units>See android.lens.info.focusDistanceCalibration for details</units> 3565 <range>&gt;= 0</range> 3566 <details> 3567 This control can be used for setting manual focus, on devices that support 3568 the MANUAL_SENSOR capability and have a variable-focus lens (see 3569 android.lens.info.minimumFocusDistance). 3570 3571 A value of `0.0f` means infinity focus. The value set will be clamped to 3572 `[0.0f, android.lens.info.minimumFocusDistance]`. 3573 3574 Like android.lens.focalLength, this setting won't be applied 3575 instantaneously, and it may take several frames before the lens 3576 can move to the requested focus distance. While the lens is still moving, 3577 android.lens.state will be set to MOVING. 3578 3579 LEGACY devices support at most setting this to `0.0f` 3580 for infinity focus. 
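As a minimal sketch only (assuming a device that advertises the MANUAL_SENSOR
capability and a variable-focus lens, plus hypothetical `characteristics` and
`previewRequestBuilder` objects from the application), a manual focus distance could
be requested like this:

    // Sketch: disable auto-focus and request a manual focus distance in diopters.
    Float minFocusDistance = characteristics.get(
            CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE); // may be null or 0 on fixed-focus devices
    previewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
            CaptureRequest.CONTROL_AF_MODE_OFF);
    // 0.0f means infinity; requested values are clamped to [0.0f, minimumFocusDistance].
    float requested = Math.min(2.0f, minFocusDistance);  // 2 diopters = 0.5 m, if supported
    previewRequestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, requested);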
3581 </details> 3582 <tag id="BC" /> 3583 <tag id="V1" /> 3584 </entry> 3585 <entry name="opticalStabilizationMode" type="byte" visibility="public" 3586 enum="true" hwlevel="limited"> 3587 <enum> 3588 <value>OFF 3589 <notes>Optical stabilization is unavailable.</notes> 3590 </value> 3591 <value optional="true">ON 3592 <notes>Optical stabilization is enabled.</notes> 3593 </value> 3594 </enum> 3595 <description> 3596 Sets whether the camera device uses optical image stabilization (OIS) 3597 when capturing images. 3598 </description> 3599 <range>android.lens.info.availableOpticalStabilization</range> 3600 <details> 3601 OIS is used to compensate for motion blur due to small 3602 movements of the camera during capture. Unlike digital image 3603 stabilization (android.control.videoStabilizationMode), OIS 3604 makes use of mechanical elements to stabilize the camera 3605 sensor, and thus allows for longer exposure times before 3606 camera shake becomes apparent. 3607 3608 Switching between different optical stabilization modes may take several 3609 frames to initialize, the camera device will report the current mode in 3610 capture result metadata. For example, When "ON" mode is requested, the 3611 optical stabilization modes in the first several capture results may still 3612 be "OFF", and it will become "ON" when the initialization is done. 3613 3614 If a camera device supports both OIS and digital image stabilization 3615 (android.control.videoStabilizationMode), turning both modes on may produce undesirable 3616 interaction, so it is recommended not to enable both at the same time. 3617 3618 Not all devices will support OIS; see 3619 android.lens.info.availableOpticalStabilization for 3620 available controls. 3621 </details> 3622 <tag id="V1" /> 3623 </entry> 3624 </controls> 3625 <static> 3626 <namespace name="info"> 3627 <entry name="availableApertures" type="float" visibility="public" 3628 container="array" hwlevel="full"> 3629 <array> 3630 <size>n</size> 3631 </array> 3632 <description>List of aperture size values for android.lens.aperture that are 3633 supported by this camera device.</description> 3634 <units>The aperture f-number</units> 3635 <details>If the camera device doesn't support a variable lens aperture, 3636 this list will contain only one value, which is the fixed aperture size. 3637 3638 If the camera device supports a variable aperture, the aperture values 3639 in this list will be sorted in ascending order.</details> 3640 <tag id="V1" /> 3641 </entry> 3642 <entry name="availableFilterDensities" type="float" visibility="public" 3643 container="array" hwlevel="full"> 3644 <array> 3645 <size>n</size> 3646 </array> 3647 <description> 3648 List of neutral density filter values for 3649 android.lens.filterDensity that are supported by this camera device. 3650 </description> 3651 <units>Exposure value (EV)</units> 3652 <range> 3653 Values are &gt;= 0 3654 </range> 3655 <details> 3656 If a neutral density filter is not supported by this camera device, 3657 this list will contain only 0. Otherwise, this list will include every 3658 filter density supported by the camera device, in ascending order. 3659 </details> 3660 <tag id="V1" /> 3661 </entry> 3662 <entry name="availableFocalLengths" type="float" visibility="public" 3663 type_notes="The list of available focal lengths" 3664 container="array" hwlevel="legacy"> 3665 <array> 3666 <size>n</size> 3667 </array> 3668 <description> 3669 List of focal lengths for android.lens.focalLength that are supported by this camera 3670 device. 
3671 </description> 3672 <units>Millimeters</units> 3673 <range> 3674 Values are &gt; 0 3675 </range> 3676 <details> 3677 If optical zoom is not supported, this list will only contain 3678 a single value corresponding to the fixed focal length of the 3679 device. Otherwise, this list will include every focal length supported 3680 by the camera device, in ascending order. 3681 </details> 3682 <tag id="BC" /> 3683 <tag id="V1" /> 3684 </entry> 3685 <entry name="availableOpticalStabilization" type="byte" 3686 visibility="public" type_notes="list of enums" container="array" 3687 typedef="enumList" hwlevel="limited"> 3688 <array> 3689 <size>n</size> 3690 </array> 3691 <description> 3692 List of optical image stabilization (OIS) modes for 3693 android.lens.opticalStabilizationMode that are supported by this camera device. 3694 </description> 3695 <range>Any value listed in android.lens.opticalStabilizationMode</range> 3696 <details> 3697 If OIS is not supported by a given camera device, this list will 3698 contain only OFF. 3699 </details> 3700 <tag id="V1" /> 3701 </entry> 3702 <entry name="hyperfocalDistance" type="float" visibility="public" optional="true" 3703 hwlevel="limited"> 3704 <description>Hyperfocal distance for this lens.</description> 3705 <units>See android.lens.info.focusDistanceCalibration for details</units> 3706 <range>If lens is fixed focus, &gt;= 0. If lens has focuser unit, the value is 3707 within `(0.0f, android.lens.info.minimumFocusDistance]`</range> 3708 <details> 3709 If the lens is not fixed focus, the camera device will report this 3710 field when android.lens.info.focusDistanceCalibration is APPROXIMATE or CALIBRATED. 3711 </details> 3712 </entry> 3713 <entry name="minimumFocusDistance" type="float" visibility="public" optional="true" 3714 hwlevel="limited"> 3715 <description>Shortest distance from frontmost surface 3716 of the lens that can be brought into sharp focus.</description> 3717 <units>See android.lens.info.focusDistanceCalibration for details</units> 3718 <range>&gt;= 0</range> 3719 <details>If the lens is fixed-focus, this will be 3720 0.</details> 3721 <hal_details>Mandatory for FULL devices; LIMITED devices 3722 must always set this value to 0 for fixed-focus; and may omit 3723 the minimum focus distance otherwise. 3724 3725 This field is also mandatory for all devices advertising 3726 the MANUAL_SENSOR capability.</hal_details> 3727 <tag id="V1" /> 3728 </entry> 3729 <entry name="shadingMapSize" type="int32" visibility="ndk_public" 3730 type_notes="width and height (N, M) of lens shading map provided by the camera device." 3731 container="array" typedef="size" hwlevel="full"> 3732 <array> 3733 <size>2</size> 3734 </array> 3735 <description>Dimensions of lens shading map.</description> 3736 <range>Both values &gt;= 1</range> 3737 <details> 3738 The map should be on the order of 30-40 rows and columns, and 3739 must be smaller than 64x64. 3740 </details> 3741 <tag id="V1" /> 3742 </entry> 3743 <entry name="focusDistanceCalibration" type="byte" visibility="public" 3744 enum="true" hwlevel="limited"> 3745 <enum> 3746 <value>UNCALIBRATED 3747 <notes> 3748 The lens focus distance is not accurate, and the units used for 3749 android.lens.focusDistance do not correspond to any physical units. 3750 3751 Setting the lens to the same focus distance on separate occasions may 3752 result in a different real focus distance, depending on factors such 3753 as the orientation of the device, the age of the focusing mechanism, 3754 and the device temperature. 
The focus distance value will still be 3755 in the range of `[0, android.lens.info.minimumFocusDistance]`, where 0 3756 represents the farthest focus. 3757 </notes> 3758 </value> 3759 <value>APPROXIMATE 3760 <notes> 3761 The lens focus distance is measured in diopters. 3762 3763 However, setting the lens to the same focus distance 3764 on separate occasions may result in a different real 3765 focus distance, depending on factors such as the 3766 orientation of the device, the age of the focusing 3767 mechanism, and the device temperature. 3768 </notes> 3769 </value> 3770 <value>CALIBRATED 3771 <notes> 3772 The lens focus distance is measured in diopters, and 3773 is calibrated. 3774 3775 The lens mechanism is calibrated so that setting the 3776 same focus distance is repeatable on multiple 3777 occasions with good accuracy, and the focus distance 3778 corresponds to the real physical distance to the plane 3779 of best focus. 3780 </notes> 3781 </value> 3782 </enum> 3783 <description>The lens focus distance calibration quality.</description> 3784 <details> 3785 The lens focus distance calibration quality determines the reliability of 3786 focus related metadata entries, i.e. android.lens.focusDistance, 3787 android.lens.focusRange, android.lens.info.hyperfocalDistance, and 3788 android.lens.info.minimumFocusDistance. 3789 3790 APPROXIMATE and CALIBRATED devices report the focus metadata in 3791 units of diopters (1/meter), so `0.0f` represents focusing at infinity, 3792 and increasing positive numbers represent focusing closer and closer 3793 to the camera device. The focus distance control also uses diopters 3794 on these devices. 3795 3796 UNCALIBRATED devices do not use units that are directly comparable 3797 to any real physical measurement, but `0.0f` still represents farthest 3798 focus, and android.lens.info.minimumFocusDistance represents the 3799 nearest focus the device can achieve. 3800 </details> 3801 <hal_details> 3802 For devices advertise APPROXIMATE quality or higher, diopters 0 (infinity 3803 focus) must work. When autofocus is disabled (android.control.afMode == OFF) 3804 and the lens focus distance is set to 0 diopters 3805 (android.lens.focusDistance == 0), the lens will move to focus at infinity 3806 and is stably focused at infinity even if the device tilts. It may take the 3807 lens some time to move; during the move the lens state should be MOVING and 3808 the output diopter value should be changing toward 0. 3809 </hal_details> 3810 <tag id="V1" /> 3811 </entry> 3812 </namespace> 3813 <entry name="facing" type="byte" visibility="public" enum="true" hwlevel="legacy"> 3814 <enum> 3815 <value>FRONT 3816 <notes> 3817 The camera device faces the same direction as the device's screen. 3818 </notes></value> 3819 <value>BACK 3820 <notes> 3821 The camera device faces the opposite direction as the device's screen. 3822 </notes></value> 3823 <value>EXTERNAL 3824 <notes> 3825 The camera device is an external camera, and has no fixed facing relative to the 3826 device's screen. 3827 </notes></value> 3828 </enum> 3829 <description>Direction the camera faces relative to 3830 device screen.</description> 3831 </entry> 3832 <entry name="poseRotation" type="float" visibility="public" 3833 container="array"> 3834 <array> 3835 <size>4</size> 3836 </array> 3837 <description> 3838 The orientation of the camera relative to the sensor 3839 coordinate system. 
3840 </description> 3841 <units> 3842 Quaternion coefficients 3843 </units> 3844 <details> 3845 The four coefficients that describe the quaternion 3846 rotation from the Android sensor coordinate system to a 3847 camera-aligned coordinate system where the X-axis is 3848 aligned with the long side of the image sensor, the Y-axis 3849 is aligned with the short side of the image sensor, and 3850 the Z-axis is aligned with the optical axis of the sensor. 3851 3852 To convert from the quaternion coefficients `(x,y,z,w)` 3853 to the axis of rotation `(a_x, a_y, a_z)` and rotation 3854 amount `theta`, the following formulas can be used: 3855 3856 theta = 2 * acos(w) 3857 a_x = x / sin(theta/2) 3858 a_y = y / sin(theta/2) 3859 a_z = z / sin(theta/2) 3860 3861 To create a 3x3 rotation matrix that applies the rotation 3862 defined by this quaternion, the following matrix can be 3863 used: 3864 3865 R = [ 1 - 2y^2 - 2z^2, 2xy - 2zw, 2xz + 2yw, 3866 2xy + 2zw, 1 - 2x^2 - 2z^2, 2yz - 2xw, 3867 2xz - 2yw, 2yz + 2xw, 1 - 2x^2 - 2y^2 ] 3868 3869 This matrix can then be used to apply the rotation to a 3870 column vector point with 3871 3872 `p' = Rp` 3873 3874 where `p` is in the device sensor coordinate system, and 3875 `p'` is in the camera-oriented coordinate system. 3876 </details> 3877 <tag id="DEPTH" /> 3878 </entry> 3879 <entry name="poseTranslation" type="float" visibility="public" 3880 container="array"> 3881 <array> 3882 <size>3</size> 3883 </array> 3884 <description>Position of the camera optical center.</description> 3885 <units>Meters</units> 3886 <details> 3887 The position of the camera device's lens optical center, 3888 as a three-dimensional vector `(x,y,z)`. 3889 3890 Prior to Android P, or when android.lens.poseReference is PRIMARY_CAMERA, this position 3891 is relative to the optical center of the largest camera device facing in the same 3892 direction as this camera, in the {@link android.hardware.SensorEvent Android sensor 3893 coordinate axes}. Note that only the axis definitions are shared with the sensor 3894 coordinate system, but not the origin. 3895 3896 If this device is the largest or only camera device with a given facing, then this 3897 position will be `(0, 0, 0)`; a camera device with a lens optical center located 3 cm 3898 from the main sensor along the +X axis (to the right from the user's perspective) will 3899 report `(0.03, 0, 0)`. 3900 3901 To transform a pixel coordinates between two cameras facing the same direction, first 3902 the source camera android.lens.distortion must be corrected for. Then the source 3903 camera android.lens.intrinsicCalibration needs to be applied, followed by the 3904 android.lens.poseRotation of the source camera, the translation of the source camera 3905 relative to the destination camera, the android.lens.poseRotation of the destination 3906 camera, and finally the inverse of android.lens.intrinsicCalibration of the destination 3907 camera. This obtains a radial-distortion-free coordinate in the destination camera pixel 3908 coordinates. 3909 3910 To compare this against a real image from the destination camera, the destination camera 3911 image then needs to be corrected for radial distortion before comparison or sampling. 3912 3913 When android.lens.poseReference is GYROSCOPE, then this position is relative to 3914 the center of the primary gyroscope on the device. 
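
As a purely illustrative sketch (not part of this specification), an application could read these pose entries and expand the android.lens.poseRotation quaternion into the 3x3 rotation matrix given above; the `characteristics` object below is assumed to be this camera's CameraCharacteristics:

    // Hypothetical app-side sketch; `characteristics` is assumed to exist.
    float[] q = characteristics.get(CameraCharacteristics.LENS_POSE_ROTATION);    // {x, y, z, w}
    float[] t = characteristics.get(CameraCharacteristics.LENS_POSE_TRANSLATION); // {x, y, z}, meters
    float x = q[0], y = q[1], z = q[2], w = q[3];
    float[] R = {
        1 - 2*y*y - 2*z*z,  2*x*y - 2*z*w,      2*x*z + 2*y*w,
        2*x*y + 2*z*w,      1 - 2*x*x - 2*z*z,  2*y*z - 2*x*w,
        2*x*z - 2*y*w,      2*y*z + 2*x*w,      1 - 2*x*x - 2*y*y };
    // `p' = Rp` rotates a point from the Android sensor coordinate frame into the
    // camera-aligned frame, and `t` gives this camera's optical center in the
    // reference frame selected by android.lens.poseReference.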
3915 </details> 3916 <tag id="DEPTH" /> 3917 </entry> 3918 </static> 3919 <dynamic> 3920 <clone entry="android.lens.aperture" kind="controls"> 3921 <tag id="V1" /> 3922 </clone> 3923 <clone entry="android.lens.filterDensity" kind="controls"> 3924 <tag id="V1" /> 3925 </clone> 3926 <clone entry="android.lens.focalLength" kind="controls"> 3927 <tag id="BC" /> 3928 </clone> 3929 <clone entry="android.lens.focusDistance" kind="controls"> 3930 <details>Should be zero for fixed-focus cameras</details> 3931 <tag id="BC" /> 3932 </clone> 3933 <entry name="focusRange" type="float" visibility="public" 3934 type_notes="Range of scene distances that are in focus" 3935 container="array" typedef="pairFloatFloat" hwlevel="limited"> 3936 <array> 3937 <size>2</size> 3938 </array> 3939 <description>The range of scene distances that are in 3940 sharp focus (depth of field).</description> 3941 <units>A pair of focus distances in diopters: (near, 3942 far); see android.lens.info.focusDistanceCalibration for details.</units> 3943 <range>&gt;=0</range> 3944 <details>If variable focus not supported, can still report 3945 fixed depth of field range</details> 3946 <tag id="BC" /> 3947 </entry> 3948 <clone entry="android.lens.opticalStabilizationMode" 3949 kind="controls"> 3950 <tag id="V1" /> 3951 </clone> 3952 <entry name="state" type="byte" visibility="public" enum="true" hwlevel="limited"> 3953 <enum> 3954 <value>STATIONARY 3955 <notes> 3956 The lens parameters (android.lens.focalLength, android.lens.focusDistance, 3957 android.lens.filterDensity and android.lens.aperture) are not changing. 3958 </notes> 3959 </value> 3960 <value>MOVING 3961 <notes> 3962 One or several of the lens parameters 3963 (android.lens.focalLength, android.lens.focusDistance, 3964 android.lens.filterDensity or android.lens.aperture) is 3965 currently changing. 3966 </notes> 3967 </value> 3968 </enum> 3969 <description>Current lens status.</description> 3970 <details> 3971 For lens parameters android.lens.focalLength, android.lens.focusDistance, 3972 android.lens.filterDensity and android.lens.aperture, when changes are requested, 3973 they may take several frames to reach the requested values. This state indicates 3974 the current status of the lens parameters. 3975 3976 When the state is STATIONARY, the lens parameters are not changing. This could be 3977 either because the parameters are all fixed, or because the lens has had enough 3978 time to reach the most recently-requested values. 3979 If all these lens parameters are not changable for a camera device, as listed below: 3980 3981 * Fixed focus (`android.lens.info.minimumFocusDistance == 0`), which means 3982 android.lens.focusDistance parameter will always be 0. 3983 * Fixed focal length (android.lens.info.availableFocalLengths contains single value), 3984 which means the optical zoom is not supported. 3985 * No ND filter (android.lens.info.availableFilterDensities contains only 0). 3986 * Fixed aperture (android.lens.info.availableApertures contains single value). 3987 3988 Then this state will always be STATIONARY. 3989 3990 When the state is MOVING, it indicates that at least one of the lens parameters 3991 is changing. 
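
As a non-normative illustration, an application using manual lens control might watch this state in its capture callback to know when the lens has settled after a focus distance change:

    // Hedged sketch; registering the callback on the capture session is assumed.
    CameraCaptureSession.CaptureCallback callback = new CameraCaptureSession.CaptureCallback() {
        @Override
        public void onCaptureCompleted(CameraCaptureSession session,
                CaptureRequest request, TotalCaptureResult result) {
            Integer lensState = result.get(CaptureResult.LENS_STATE);
            if (lensState != null && lensState == CameraMetadata.LENS_STATE_STATIONARY) {
                // The lens has reached the most recently requested parameters;
                // frames from this point on reflect the requested focus distance.
            }
        }
    };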
3992 </details>
3993 <tag id="V1" />
3994 </entry>
3995 <clone entry="android.lens.poseRotation" kind="static">
3996 </clone>
3997 <clone entry="android.lens.poseTranslation" kind="static">
3998 </clone>
3999 </dynamic>
4000 <static>
4001 <entry name="intrinsicCalibration" type="float" visibility="public"
4002 container="array">
4003 <array>
4004 <size>5</size>
4005 </array>
4006 <description>
4007 The parameters for this camera device's intrinsic
4008 calibration.
4009 </description>
4010 <units>
4011 Pixels in the
4012 android.sensor.info.preCorrectionActiveArraySize
4013 coordinate system.
4014 </units>
4015 <details>
4016 The five calibration parameters that describe the
4017 transform from camera-centric 3D coordinates to sensor
4018 pixel coordinates:
4019
4020 [f_x, f_y, c_x, c_y, s]
4021
4022 Where `f_x` and `f_y` are the horizontal and vertical
4023 focal lengths, `[c_x, c_y]` is the position of the optical
4024 axis, and `s` is a skew parameter for the sensor plane not
4025 being aligned with the lens plane.
4026
4027 These are typically used within a transformation matrix K:
4028
4029 K = [ f_x, s, c_x,
4030 0, f_y, c_y,
4031 0, 0, 1 ]
4032
4033 which can then be combined with the camera pose rotation
4034 `R` and translation `t` (android.lens.poseRotation and
4035 android.lens.poseTranslation, respectively) to calculate the
4036 complete transform from world coordinates to pixel
4037 coordinates:
4038
4039 P = [ K 0 * [ R t
4040 0 1 ] 0 1 ]
4041
4042 and with `p_w` being a point in the world coordinate system
4043 and `p_s` being a point in the camera active pixel array
4044 coordinate system, and with the mapping including the
4045 homogeneous division by z:
4046
4047 p_h = (x_h, y_h, z_h, w_h) = P p_w
4048 p_s = p_h / z_h
4049
4050 so `[x_s, y_s]` are the pixel coordinates of the world
4051 point, `z_s = 1`, and `w_s` is a measurement of disparity
4052 (depth) in pixel coordinates.
4053
4054 Note that the coordinate system for this transform is the
4055 android.sensor.info.preCorrectionActiveArraySize system,
4056 where `(0,0)` is the top-left of the
4057 preCorrectionActiveArraySize rectangle. Once the pose and
4058 intrinsic calibration transforms have been applied to a
4059 world point, then the android.lens.distortion
4060 transform needs to be applied, and the result adjusted to
4061 be in the android.sensor.info.activeArraySize coordinate
4062 system (where `(0, 0)` is the top-left of the
4063 activeArraySize rectangle), to determine the final pixel
4064 coordinate of the world point for processed (non-RAW)
4065 output buffers.
4066 </details>
4067 <tag id="DEPTH" />
4068 </entry>
4069 <entry name="radialDistortion" type="float" visibility="public"
4070 deprecated="true" container="array">
4071 <array>
4072 <size>6</size>
4073 </array>
4074 <description>
4075 The correction coefficients to correct for this camera device's
4076 radial and tangential lens distortion.
4077 </description>
4078 <deprecation_description>
4079 This field was inconsistently defined in terms of its
4080 normalization. Use android.lens.distortion instead.
4081 </deprecation_description>
4082 <units>
4083 Unitless coefficients.
4084 </units> 4085 <details> 4086 Four radial distortion coefficients `[kappa_0, kappa_1, kappa_2, 4087 kappa_3]` and two tangential distortion coefficients 4088 `[kappa_4, kappa_5]` that can be used to correct the 4089 lens's geometric distortion with the mapping equations: 4090 4091 x_c = x_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) + 4092 kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 ) 4093 y_c = y_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) + 4094 kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 ) 4095 4096 Here, `[x_c, y_c]` are the coordinates to sample in the 4097 input image that correspond to the pixel values in the 4098 corrected image at the coordinate `[x_i, y_i]`: 4099 4100 correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage) 4101 4102 The pixel coordinates are defined in a normalized 4103 coordinate system related to the 4104 android.lens.intrinsicCalibration calibration fields. 4105 Both `[x_i, y_i]` and `[x_c, y_c]` have `(0,0)` at the 4106 lens optical center `[c_x, c_y]`. The maximum magnitudes 4107 of both x and y coordinates are normalized to be 1 at the 4108 edge further from the optical center, so the range 4109 for both dimensions is `-1 <= x <= 1`. 4110 4111 Finally, `r` represents the radial distance from the 4112 optical center, `r^2 = x_i^2 + y_i^2`, and its magnitude 4113 is therefore no larger than `|r| <= sqrt(2)`. 4114 4115 The distortion model used is the Brown-Conrady model. 4116 </details> 4117 <tag id="DEPTH" /> 4118 </entry> 4119 </static> 4120 <dynamic> 4121 <clone entry="android.lens.intrinsicCalibration" kind="static"> 4122 </clone> 4123 <clone entry="android.lens.radialDistortion" kind="static"> 4124 </clone> 4125 </dynamic> 4126 <static> 4127 <entry name="poseReference" type="byte" visibility="public" enum="true" hal_version="3.3"> 4128 <enum> 4129 <value>PRIMARY_CAMERA 4130 <notes>The value of android.lens.poseTranslation is relative to the optical center of 4131 the largest camera device facing the same direction as this camera. 4132 4133 This is the default value for API levels before Android P. 4134 </notes> 4135 </value> 4136 <value>GYROSCOPE 4137 <notes>The value of android.lens.poseTranslation is relative to the position of the 4138 primary gyroscope of this Android device. 4139 </notes> 4140 </value> 4141 </enum> 4142 <description> 4143 The origin for android.lens.poseTranslation. 4144 </description> 4145 <details> 4146 Different calibration methods and use cases can produce better or worse results 4147 depending on the selected coordinate origin. 4148 </details> 4149 </entry> 4150 <entry name="distortion" type="float" visibility="public" container="array" 4151 hal_version="3.3"> 4152 <array> 4153 <size>5</size> 4154 </array> 4155 <description> 4156 The correction coefficients to correct for this camera device's 4157 radial and tangential lens distortion. 4158 4159 Replaces the deprecated android.lens.radialDistortion field, which was 4160 inconsistently defined. 4161 </description> 4162 <units> 4163 Unitless coefficients. 
4164 </units> 4165 <details> 4166 Three radial distortion coefficients `[kappa_1, kappa_2, 4167 kappa_3]` and two tangential distortion coefficients 4168 `[kappa_4, kappa_5]` that can be used to correct the 4169 lens's geometric distortion with the mapping equations: 4170 4171 x_c = x_i * ( 1 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) + 4172 kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 ) 4173 y_c = y_i * ( 1 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) + 4174 kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 ) 4175 4176 Here, `[x_c, y_c]` are the coordinates to sample in the 4177 input image that correspond to the pixel values in the 4178 corrected image at the coordinate `[x_i, y_i]`: 4179 4180 correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage) 4181 4182 The pixel coordinates are defined in a coordinate system 4183 related to the android.lens.intrinsicCalibration 4184 calibration fields; see that entry for details of the mapping stages. 4185 Both `[x_i, y_i]` and `[x_c, y_c]` 4186 have `(0,0)` at the lens optical center `[c_x, c_y]`, and 4187 the range of the coordinates depends on the focal length 4188 terms of the intrinsic calibration. 4189 4190 Finally, `r` represents the radial distance from the 4191 optical center, `r^2 = x_i^2 + y_i^2`. 4192 4193 The distortion model used is the Brown-Conrady model. 4194 </details> 4195 <tag id="DEPTH" /> 4196 </entry> 4197 </static> 4198 <dynamic> 4199 <clone entry="android.lens.distortion" kind="static"> 4200 </clone> 4201 </dynamic> 4202 </section> 4203 <section name="noiseReduction"> 4204 <controls> 4205 <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full"> 4206 <enum> 4207 <value>OFF 4208 <notes>No noise reduction is applied.</notes></value> 4209 <value>FAST 4210 <notes>Noise reduction is applied without reducing frame rate relative to sensor 4211 output. It may be the same as OFF if noise reduction will reduce frame rate 4212 relative to sensor.</notes></value> 4213 <value>HIGH_QUALITY 4214 <notes>High-quality noise reduction is applied, at the cost of possibly reduced frame 4215 rate relative to sensor output.</notes></value> 4216 <value optional="true">MINIMAL 4217 <notes>MINIMAL noise reduction is applied without reducing frame rate relative to 4218 sensor output. </notes></value> 4219 <value optional="true">ZERO_SHUTTER_LAG 4220 4221 <notes>Noise reduction is applied at different levels for different output streams, 4222 based on resolution. Streams at maximum recording resolution (see {@link 4223 android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession}) 4224 or below have noise reduction applied, while higher-resolution streams have MINIMAL (if 4225 supported) or no noise reduction applied (if MINIMAL is not supported.) The degree of 4226 noise reduction for low-resolution streams is tuned so that frame rate is not impacted, 4227 and the quality is equal to or better than FAST (since it is only applied to 4228 lower-resolution outputs, quality may improve from FAST). 4229 4230 This mode is intended to be used by applications operating in a zero-shutter-lag mode 4231 with YUV or PRIVATE reprocessing, where the application continuously captures 4232 high-resolution intermediate buffers into a circular buffer, from which a final image is 4233 produced via reprocessing when a user takes a picture. 
For such a use case, the
4234 high-resolution buffers must not have noise reduction applied to maximize efficiency of
4235 preview and to avoid over-applying noise filtering when reprocessing, while
4236 low-resolution buffers (used for recording or preview, generally) need noise reduction
4237 applied for reasonable preview quality.
4238
4239 This mode is guaranteed to be supported by devices that support either the
4240 YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
4241 (android.request.availableCapabilities lists either of those capabilities) and it will
4242 be the default mode for the CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
4243 </notes></value>
4244 </enum>
4245 <description>Mode of operation for the noise reduction algorithm.</description>
4246 <range>android.noiseReduction.availableNoiseReductionModes</range>
4247 <details>The noise reduction algorithm attempts to improve image quality by removing
4248 excessive noise added by the capture process, especially in dark conditions.
4249
4250 OFF means no noise reduction will be applied by the camera device, for both raw and
4251 YUV domain.
4252
4253 MINIMAL means that only sensor raw domain basic noise reduction is enabled, to remove
4254 demosaicing or other processing artifacts. For YUV_REPROCESSING, MINIMAL is the same as OFF.
4255 This mode is optional and may not be supported by all devices. The application should check
4256 android.noiseReduction.availableNoiseReductionModes before using it.
4257
4258 FAST/HIGH_QUALITY both mean camera device determined noise filtering
4259 will be applied. HIGH_QUALITY mode indicates that the camera device
4260 will use the highest-quality noise filtering algorithms,
4261 even if it slows down capture rate. FAST means the camera device will not
4262 slow down capture rate when applying noise filtering. FAST may be the same as MINIMAL if
4263 MINIMAL is listed, or the same as OFF if any noise filtering will slow down capture rate.
4264 Every output stream will have a similar amount of enhancement applied.
4265
4266 ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
4267 buffer of high-resolution images during preview and reprocess image(s) from that buffer
4268 into a final capture when triggered by the user. In this mode, the camera device applies
4269 noise reduction to low-resolution streams (below maximum recording resolution) to maximize
4270 preview quality, but does not apply noise reduction to high-resolution streams, since
4271 those will be reprocessed later if necessary.
4272
4273 For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera device
4274 will apply FAST/HIGH_QUALITY YUV domain noise reduction, respectively. The camera device
4275 may adjust the noise reduction parameters for best image quality based on the
4276 android.reprocess.effectiveExposureFactor if it is set.
4277 </details>
4278 <hal_details>
4279 For YUV_REPROCESSING, the HAL can use android.reprocess.effectiveExposureFactor to
4280 adjust the internal noise reduction parameters appropriately to get the best quality
4281 images.
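
For reference, a hedged sketch of the application-visible side of this interaction (assuming a reprocessable session and a TotalCaptureResult named `zslResult` taken from the application's circular buffer):

    // Hypothetical app-side sketch, not HAL implementation guidance.
    CaptureRequest.Builder reprocess = cameraDevice.createReprocessCaptureRequest(zslResult);
    reprocess.set(CaptureRequest.NOISE_REDUCTION_MODE,
            CameraMetadata.NOISE_REDUCTION_MODE_HIGH_QUALITY);
    // If several captures were merged into the reprocess input, the application can
    // report the effective exposure gain so the noise filtering can be retuned.
    reprocess.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, 2.0f);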
4282 </hal_details> 4283 <tag id="V1" /> 4284 <tag id="REPROC" /> 4285 </entry> 4286 <entry name="strength" type="byte"> 4287 <description>Control the amount of noise reduction 4288 applied to the images</description> 4289 <units>1-10; 10 is max noise reduction</units> 4290 <range>1 - 10</range> 4291 <tag id="FUTURE" /> 4292 </entry> 4293 </controls> 4294 <static> 4295 <entry name="availableNoiseReductionModes" type="byte" visibility="public" 4296 type_notes="list of enums" container="array" typedef="enumList" hwlevel="limited"> 4297 <array> 4298 <size>n</size> 4299 </array> 4300 <description> 4301 List of noise reduction modes for android.noiseReduction.mode that are supported 4302 by this camera device. 4303 </description> 4304 <range>Any value listed in android.noiseReduction.mode</range> 4305 <details> 4306 Full-capability camera devices will always support OFF and FAST. 4307 4308 Camera devices that support YUV_REPROCESSING or PRIVATE_REPROCESSING will support 4309 ZERO_SHUTTER_LAG. 4310 4311 Legacy-capability camera devices will only support FAST mode. 4312 </details> 4313 <hal_details> 4314 HAL must support both FAST and HIGH_QUALITY if noise reduction control is available 4315 on the camera device, but the underlying implementation can be the same for both modes. 4316 That is, if the highest quality implementation on the camera device does not slow down 4317 capture rate, then FAST and HIGH_QUALITY will generate the same output. 4318 </hal_details> 4319 <tag id="V1" /> 4320 <tag id="REPROC" /> 4321 </entry> 4322 </static> 4323 <dynamic> 4324 <clone entry="android.noiseReduction.mode" kind="controls"> 4325 <tag id="V1" /> 4326 <tag id="REPROC" /> 4327 </clone> 4328 </dynamic> 4329 </section> 4330 <section name="quirks"> 4331 <static> 4332 <entry name="meteringCropRegion" type="byte" visibility="system" deprecated="true" optional="true"> 4333 <description>If set to 1, the camera service does not 4334 scale 'normalized' coordinates with respect to the crop 4335 region. This applies to metering input (a{e,f,wb}Region 4336 and output (face rectangles).</description> 4337 <deprecation_description> 4338 Not used in HALv3 or newer 4339 </deprecation_description> 4340 <details>Normalized coordinates refer to those in the 4341 (-1000,1000) range mentioned in the 4342 android.hardware.Camera API. 4343 4344 HAL implementations should instead always use and emit 4345 sensor array-relative coordinates for all region data. Does 4346 not need to be listed in static metadata. Support will be 4347 removed in future versions of camera service.</details> 4348 </entry> 4349 <entry name="triggerAfWithAuto" type="byte" visibility="system" deprecated="true" optional="true"> 4350 <description>If set to 1, then the camera service always 4351 switches to FOCUS_MODE_AUTO before issuing a AF 4352 trigger.</description> 4353 <deprecation_description> 4354 Not used in HALv3 or newer 4355 </deprecation_description> 4356 <details>HAL implementations should implement AF trigger 4357 modes for AUTO, MACRO, CONTINUOUS_FOCUS, and 4358 CONTINUOUS_PICTURE modes instead of using this flag. Does 4359 not need to be listed in static metadata. 
Support will be 4360 removed in future versions of camera service</details> 4361 </entry> 4362 <entry name="useZslFormat" type="byte" visibility="system" deprecated="true" optional="true"> 4363 <description>If set to 1, the camera service uses 4364 CAMERA2_PIXEL_FORMAT_ZSL instead of 4365 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED for the zero 4366 shutter lag stream</description> 4367 <deprecation_description> 4368 Not used in HALv3 or newer 4369 </deprecation_description> 4370 <details>HAL implementations should use gralloc usage flags 4371 to determine that a stream will be used for 4372 zero-shutter-lag, instead of relying on an explicit 4373 format setting. Does not need to be listed in static 4374 metadata. Support will be removed in future versions of 4375 camera service.</details> 4376 </entry> 4377 <entry name="usePartialResult" type="byte" visibility="hidden" deprecated="true" optional="true"> 4378 <description> 4379 If set to 1, the HAL will always split result 4380 metadata for a single capture into multiple buffers, 4381 returned using multiple process_capture_result calls. 4382 </description> 4383 <deprecation_description> 4384 Not used in HALv3 or newer; replaced by better partials mechanism 4385 </deprecation_description> 4386 <details> 4387 Does not need to be listed in static 4388 metadata. Support for partial results will be reworked in 4389 future versions of camera service. This quirk will stop 4390 working at that point; DO NOT USE without careful 4391 consideration of future support. 4392 </details> 4393 <hal_details> 4394 Refer to `camera3_capture_result::partial_result` 4395 for information on how to implement partial results. 4396 </hal_details> 4397 </entry> 4398 </static> 4399 <dynamic> 4400 <entry name="partialResult" type="byte" visibility="hidden" deprecated="true" optional="true" enum="true" typedef="boolean"> 4401 <enum> 4402 <value>FINAL 4403 <notes>The last or only metadata result buffer 4404 for this capture.</notes> 4405 </value> 4406 <value>PARTIAL 4407 <notes>A partial buffer of result metadata for this 4408 capture. More result buffers for this capture will be sent 4409 by the camera device, the last of which will be marked 4410 FINAL.</notes> 4411 </value> 4412 </enum> 4413 <description> 4414 Whether a result given to the framework is the 4415 final one for the capture, or only a partial that contains a 4416 subset of the full set of dynamic metadata 4417 values.</description> 4418 <deprecation_description> 4419 Not used in HALv3 or newer 4420 </deprecation_description> 4421 <range>Optional. Default value is FINAL.</range> 4422 <details> 4423 The entries in the result metadata buffers for a 4424 single capture may not overlap, except for this entry. The 4425 FINAL buffers must retain FIFO ordering relative to the 4426 requests that generate them, so the FINAL buffer for frame 3 must 4427 always be sent to the framework after the FINAL buffer for frame 2, and 4428 before the FINAL buffer for frame 4. PARTIAL buffers may be returned 4429 in any order relative to other frames, but all PARTIAL buffers for a given 4430 capture must arrive before the FINAL buffer for that capture. This entry may 4431 only be used by the camera device if quirks.usePartialResult is set to 1. 4432 </details> 4433 <hal_details> 4434 Refer to `camera3_capture_result::partial_result` 4435 for information on how to implement partial results. 
4436 </hal_details> 4437 </entry> 4438 </dynamic> 4439 </section> 4440 <section name="request"> 4441 <controls> 4442 <entry name="frameCount" type="int32" visibility="system" deprecated="true"> 4443 <description>A frame counter set by the framework. Must 4444 be maintained unchanged in output frame. This value monotonically 4445 increases with every new result (that is, each new result has a unique 4446 frameCount value). 4447 </description> 4448 <deprecation_description> 4449 Not used in HALv3 or newer 4450 </deprecation_description> 4451 <units>incrementing integer</units> 4452 <range>Any int.</range> 4453 </entry> 4454 <entry name="id" type="int32" visibility="hidden"> 4455 <description>An application-specified ID for the current 4456 request. Must be maintained unchanged in output 4457 frame</description> 4458 <units>arbitrary integer assigned by application</units> 4459 <range>Any int</range> 4460 <tag id="V1" /> 4461 </entry> 4462 <entry name="inputStreams" type="int32" visibility="system" deprecated="true" 4463 container="array"> 4464 <array> 4465 <size>n</size> 4466 </array> 4467 <description>List which camera reprocess stream is used 4468 for the source of reprocessing data.</description> 4469 <deprecation_description> 4470 Not used in HALv3 or newer 4471 </deprecation_description> 4472 <units>List of camera reprocess stream IDs</units> 4473 <range> 4474 Typically, only one entry allowed, must be a valid reprocess stream ID. 4475 </range> 4476 <details>Only meaningful when android.request.type == 4477 REPROCESS. Ignored otherwise</details> 4478 <tag id="HAL2" /> 4479 </entry> 4480 <entry name="metadataMode" type="byte" visibility="system" 4481 enum="true"> 4482 <enum> 4483 <value>NONE 4484 <notes>No metadata should be produced on output, except 4485 for application-bound buffer data. If no 4486 application-bound streams exist, no frame should be 4487 placed in the output frame queue. If such streams 4488 exist, a frame should be placed on the output queue 4489 with null metadata but with the necessary output buffer 4490 information. Timestamp information should still be 4491 included with any output stream buffers</notes></value> 4492 <value>FULL 4493 <notes>All metadata should be produced. Statistics will 4494 only be produced if they are separately 4495 enabled</notes></value> 4496 </enum> 4497 <description>How much metadata to produce on 4498 output</description> 4499 <tag id="FUTURE" /> 4500 </entry> 4501 <entry name="outputStreams" type="int32" visibility="system" deprecated="true" 4502 container="array"> 4503 <array> 4504 <size>n</size> 4505 </array> 4506 <description>Lists which camera output streams image data 4507 from this capture must be sent to</description> 4508 <deprecation_description> 4509 Not used in HALv3 or newer 4510 </deprecation_description> 4511 <units>List of camera stream IDs</units> 4512 <range>List must only include streams that have been 4513 created</range> 4514 <details>If no output streams are listed, then the image 4515 data should simply be discarded. 
The image data must
4516 still be captured for metadata and statistics production,
4517 and the lens and flash must operate as requested.</details>
4518 <tag id="HAL2" />
4519 </entry>
4520 <entry name="type" type="byte" visibility="system" deprecated="true" enum="true">
4521 <enum>
4522 <value>CAPTURE
4523 <notes>Capture a new image from the imaging hardware,
4524 and process it according to the
4525 settings</notes></value>
4526 <value>REPROCESS
4527 <notes>Process previously captured data; the
4528 android.request.inputStreams parameter determines the
4529 source reprocessing stream. TODO: Mark dynamic metadata
4530 needed for reprocessing with [RP]</notes></value>
4531 </enum>
4532 <description>The type of the request; either CAPTURE or
4533 REPROCESS. For legacy HAL3, this tag is redundant.
4534 </description>
4535 <deprecation_description>
4536 Not used in HALv3 or newer
4537 </deprecation_description>
4538 <tag id="HAL2" />
4539 </entry>
4540 </controls>
4541 <static>
4542 <entry name="maxNumOutputStreams" type="int32" visibility="ndk_public"
4543 container="array" hwlevel="legacy">
4544 <array>
4545 <size>3</size>
4546 </array>
4547 <description>The maximum numbers of different types of output streams
4548 that can be configured and used simultaneously by a camera device.
4549 </description>
4550 <range>
4551 For processed (and stalling) format streams, &gt;= 1.
4552
4553 For Raw format (either stalling or non-stalling) streams, &gt;= 0.
4554
4555 For processed (but not stalling) format streams, &gt;= 3
4556 for FULL mode devices (`android.info.supportedHardwareLevel == FULL`);
4557 &gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`).
4558 </range>
4559 <details>
4560 This is a 3 element tuple that contains the max number of output simultaneous
4561 streams for raw sensor, processed (but not stalling), and processed (and stalling)
4562 formats respectively. For example, assuming that JPEG is typically a processed and
4563 stalling stream, if max raw sensor format output stream number is 1, max YUV streams
4564 number is 3, and max JPEG stream number is 2, then this tuple should be `(1, 3, 2)`.
4565
4566 This lists the upper bound of the number of output streams supported by
4567 the camera device. Using more streams simultaneously may require more hardware and
4568 CPU resources that will consume more power. The image format for an output stream can
4569 be any supported format provided by android.scaler.availableStreamConfigurations.
4570 The formats defined in android.scaler.availableStreamConfigurations can be categorized
4571 into the 3 stream types as below:
4572
4573 * Processed (but stalling): any non-RAW format with a stall duration &gt; 0.
4574 Typically {@link android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG JPEG format}.
4575 * Raw formats: {@link android.graphics.ImageFormat#RAW_SENSOR|AIMAGE_FORMAT_RAW16
4576 RAW_SENSOR}, {@link android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10 RAW10}, or
4577 {@link android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12 RAW12}.
4578 * Processed (but not-stalling): any non-RAW format without a stall duration. Typically
4579 {@link android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888 YUV_420_888},
4580 {@link android.graphics.ImageFormat#NV21 NV21}, or {@link
4581 android.graphics.ImageFormat#YV12 YV12}.
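
As a hedged illustration of how an application might apply this categorization (assuming a `characteristics` object for this camera), the stall duration reported by the stream configuration map distinguishes the second and third counts:

    StreamConfigurationMap map =
            characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    int format = ImageFormat.YUV_420_888;          // any supported non-RAW output format
    Size size = map.getOutputSizes(format)[0];
    boolean stalling = map.getOutputStallDuration(format, size) > 0;
    // RAW formats count against the first element of this tuple; non-RAW formats
    // count against the second (non-stalling) or third (stalling) element.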
4582 </details> 4583 <tag id="BC" /> 4584 </entry> 4585 <entry name="maxNumOutputRaw" type="int32" visibility="java_public" synthetic="true" 4586 hwlevel="legacy"> 4587 <description>The maximum numbers of different types of output streams 4588 that can be configured and used simultaneously by a camera device 4589 for any `RAW` formats. 4590 </description> 4591 <range> 4592 &gt;= 0 4593 </range> 4594 <details> 4595 This value contains the max number of output simultaneous 4596 streams from the raw sensor. 4597 4598 This lists the upper bound of the number of output streams supported by 4599 the camera device. Using more streams simultaneously may require more hardware and 4600 CPU resources that will consume more power. The image format for this kind of an output stream can 4601 be any `RAW` and supported format provided by android.scaler.streamConfigurationMap. 4602 4603 In particular, a `RAW` format is typically one of: 4604 4605 * {@link android.graphics.ImageFormat#RAW_SENSOR|AIMAGE_FORMAT_RAW16 RAW_SENSOR} 4606 * {@link android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10 RAW10} 4607 * {@link android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12 RAW12} 4608 4609 LEGACY mode devices (android.info.supportedHardwareLevel `==` LEGACY) 4610 never support raw streams. 4611 </details> 4612 </entry> 4613 <entry name="maxNumOutputProc" type="int32" visibility="java_public" synthetic="true" 4614 hwlevel="legacy"> 4615 <description>The maximum numbers of different types of output streams 4616 that can be configured and used simultaneously by a camera device 4617 for any processed (but not-stalling) formats. 4618 </description> 4619 <range> 4620 &gt;= 3 4621 for FULL mode devices (`android.info.supportedHardwareLevel == FULL`); 4622 &gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`). 4623 </range> 4624 <details> 4625 This value contains the max number of output simultaneous 4626 streams for any processed (but not-stalling) formats. 4627 4628 This lists the upper bound of the number of output streams supported by 4629 the camera device. Using more streams simultaneously may require more hardware and 4630 CPU resources that will consume more power. The image format for this kind of an output stream can 4631 be any non-`RAW` and supported format provided by android.scaler.streamConfigurationMap. 4632 4633 Processed (but not-stalling) is defined as any non-RAW format without a stall duration. 4634 Typically: 4635 4636 * {@link android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888 YUV_420_888} 4637 * {@link android.graphics.ImageFormat#NV21 NV21} 4638 * {@link android.graphics.ImageFormat#YV12 YV12} 4639 * Implementation-defined formats, i.e. {@link 4640 android.hardware.camera2.params.StreamConfigurationMap#isOutputSupportedFor(Class)} 4641 4642 For full guarantees, query {@link 4643 android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a 4644 processed format -- it will return 0 for a non-stalling stream. 4645 4646 LEGACY devices will support at least 2 processing/non-stalling streams. 4647 </details> 4648 </entry> 4649 <entry name="maxNumOutputProcStalling" type="int32" visibility="java_public" synthetic="true" 4650 hwlevel="legacy"> 4651 <description>The maximum numbers of different types of output streams 4652 that can be configured and used simultaneously by a camera device 4653 for any processed (and stalling) formats. 
4654 </description>
4655 <range>
4656 &gt;= 1
4657 </range>
4658 <details>
4659 This value contains the max number of output simultaneous
4660 streams for any processed (and stalling) formats.
4661
4662 This lists the upper bound of the number of output streams supported by
4663 the camera device. Using more streams simultaneously may require more hardware and
4664 CPU resources that will consume more power. The image format for this kind of an output stream can
4665 be any non-`RAW` and supported format provided by android.scaler.streamConfigurationMap.
4666
4667 A processed and stalling format is defined as any non-RAW format with a stall duration
4668 &gt; 0. Typically only the {@link
4669 android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG JPEG format} is a stalling format.
4670
4671 For full guarantees, query {@link
4672 android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a
4673 processed format -- it will return a non-0 value for a stalling stream.
4674
4675 LEGACY devices will support up to 1 processing/stalling stream.
4676 </details>
4677 </entry>
4678 <entry name="maxNumReprocessStreams" type="int32" visibility="system"
4679 deprecated="true" container="array">
4680 <array>
4681 <size>1</size>
4682 </array>
4683 <description>How many reprocessing streams of any type
4684 can be allocated at the same time.</description>
4685 <deprecation_description>
4686 Not used in HALv3 or newer
4687 </deprecation_description>
4688 <range>&gt;= 0</range>
4689 <details>
4690 Only used by HAL2.x.
4691
4692 When set to 0, it means no reprocess stream is supported.
4693 </details>
4694 <tag id="HAL2" />
4695 </entry>
4696 <entry name="maxNumInputStreams" type="int32" visibility="java_public" hwlevel="full">
4697 <description>
4698 The maximum numbers of any type of input streams
4699 that can be configured and used simultaneously by a camera device.
4700 </description>
4701 <range>
4702 0 or 1.
4703 </range>
4704 <details>When set to 0, it means no input stream is supported.
4705
4706 The image format for an input stream can be any supported format returned by {@link
4707 android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}. When using an
4708 input stream, there must be at least one output stream configured to receive the
4709 reprocessed images.
4710
4711 When an input stream and some output streams are used in a reprocessing request,
4712 only the input buffer will be used to produce these output stream buffers, and a
4713 new sensor image will not be captured.
4714
4715 For example, for the Zero Shutter Lag (ZSL) still capture use case, the input
4716 stream image format will be PRIVATE, and the associated output stream image format
4717 should be JPEG.
4718 </details>
4719 <hal_details>
4720 For the reprocessing flow and controls, see
4721 hardware/libhardware/include/hardware/camera3.h Section 10 for more details.
4722 </hal_details>
4723 <tag id="REPROC" />
4724 </entry>
4725 </static>
4726 <dynamic>
4727 <entry name="frameCount" type="int32" visibility="hidden" deprecated="true">
4728 <description>A frame counter set by the framework.
This value monotonically
4729 increases with every new result (that is, each new result has a unique
4730 frameCount value).</description>
4731 <deprecation_description>
4732 Not used in HALv3 or newer
4733 </deprecation_description>
4734 <units>count of frames</units>
4735 <range>&gt; 0</range>
4736 <details>Reset on release()</details>
4737 </entry>
4738 <clone entry="android.request.id" kind="controls"></clone>
4739 <clone entry="android.request.metadataMode"
4740 kind="controls"></clone>
4741 <clone entry="android.request.outputStreams"
4742 kind="controls"></clone>
4743 <entry name="pipelineDepth" type="byte" visibility="public" hwlevel="legacy">
4744 <description>Specifies the number of pipeline stages the frame went
4745 through from when it was exposed to when the final completed result
4746 was available to the framework.</description>
4747 <range>&lt;= android.request.pipelineMaxDepth</range>
4748 <details>Depending on what settings are used in the request, and
4749 what streams are configured, the data may undergo less processing,
4750 and some pipeline stages may be skipped.
4751
4752 See android.request.pipelineMaxDepth for more details.
4753 </details>
4754 <hal_details>
4755 This value must always represent the accurate count of how many
4756 pipeline stages were actually used.
4757 </hal_details>
4758 </entry>
4759 </dynamic>
4760 <static>
4761 <entry name="pipelineMaxDepth" type="byte" visibility="public" hwlevel="legacy">
4762 <description>Specifies the maximum number of pipeline stages a frame
4763 has to go through from when it's exposed to when it's available
4764 to the framework.</description>
4765 <details>A typical minimum value for this is 2 (one stage to expose,
4766 one stage to readout) from the sensor. The ISP then usually adds
4767 its own stages to do custom HW processing. Further stages may be
4768 added by SW processing.
4769
4770 Depending on what settings are used (e.g. YUV, JPEG) and what
4771 processing is enabled (e.g. face detection), the actual pipeline
4772 depth (specified by android.request.pipelineDepth) may be less than
4773 the max pipeline depth.
4774
4775 A pipeline depth of X stages is equivalent to a pipeline latency of
4776 X frame intervals.
4777
4778 This value will normally be 8 or less; however, for a high speed capture session,
4779 the max pipeline depth will be up to 8 x the size of the high speed capture request list.
4780 </details>
4781 <hal_details>
4782 This value should be 4 or less, except for high speed recording sessions, where the
4783 max batch sizes may be larger than 1.
4784 </hal_details>
4785 </entry>
4786 <entry name="partialResultCount" type="int32" visibility="public" optional="true">
4787 <description>Defines how many sub-components
4788 a result will be composed of.
4789 </description>
4790 <range>&gt;= 1</range>
4791 <details>In order to combat the pipeline latency, partial results
4792 may be delivered to the application layer from the camera device as
4793 soon as they are available.
4794
4795 Optional; defaults to 1. A value of 1 means that partial
4796 results are not supported, and only the final TotalCaptureResult will
4797 be produced by the camera device.
4798
4799 A typical use case for this might be: after requesting an
4800 auto-focus (AF) lock, the new AF state might be available 50%
4801 of the way through the pipeline. The camera device could
4802 then immediately dispatch this state via a partial result to
4803 the application, and the rest of the metadata via later
4804 partial results.
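
A minimal, non-normative sketch of how an application might consume such partial results (assuming this device reports a partial result count greater than 1):

    CameraCaptureSession.CaptureCallback callback = new CameraCaptureSession.CaptureCallback() {
        @Override
        public void onCaptureProgressed(CameraCaptureSession session,
                CaptureRequest request, CaptureResult partialResult) {
            Integer afState = partialResult.get(CaptureResult.CONTROL_AF_STATE);
            if (afState != null) {
                // The AF state arrived ahead of the final TotalCaptureResult and can
                // be acted on immediately.
            }
        }
    };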
4805 </details> 4806 </entry> 4807 <entry name="availableCapabilities" type="byte" visibility="public" 4808 enum="true" container="array" hwlevel="legacy"> 4809 <array> 4810 <size>n</size> 4811 </array> 4812 <enum> 4813 <value>BACKWARD_COMPATIBLE 4814 <notes>The minimal set of capabilities that every camera 4815 device (regardless of android.info.supportedHardwareLevel) 4816 supports. 4817 4818 This capability is listed by all normal devices, and 4819 indicates that the camera device has a feature set 4820 that's comparable to the baseline requirements for the 4821 older android.hardware.Camera API. 4822 4823 Devices with the DEPTH_OUTPUT capability might not list this 4824 capability, indicating that they support only depth measurement, 4825 not standard color output. 4826 </notes> 4827 </value> 4828 <value optional="true">MANUAL_SENSOR 4829 <notes> 4830 The camera device can be manually controlled (3A algorithms such 4831 as auto-exposure, and auto-focus can be bypassed). 4832 The camera device supports basic manual control of the sensor image 4833 acquisition related stages. This means the following controls are 4834 guaranteed to be supported: 4835 4836 * Manual frame duration control 4837 * android.sensor.frameDuration 4838 * android.sensor.info.maxFrameDuration 4839 * Manual exposure control 4840 * android.sensor.exposureTime 4841 * android.sensor.info.exposureTimeRange 4842 * Manual sensitivity control 4843 * android.sensor.sensitivity 4844 * android.sensor.info.sensitivityRange 4845 * Manual lens control (if the lens is adjustable) 4846 * android.lens.* 4847 * Manual flash control (if a flash unit is present) 4848 * android.flash.* 4849 * Manual black level locking 4850 * android.blackLevel.lock 4851 * Auto exposure lock 4852 * android.control.aeLock 4853 4854 If any of the above 3A algorithms are enabled, then the camera 4855 device will accurately report the values applied by 3A in the 4856 result. 4857 4858 A given camera device may also support additional manual sensor controls, 4859 but this capability only covers the above list of controls. 4860 4861 If this is supported, android.scaler.streamConfigurationMap will 4862 additionally return a min frame duration that is greater than 4863 zero for each supported size-format combination. 4864 </notes> 4865 </value> 4866 <value optional="true">MANUAL_POST_PROCESSING 4867 <notes> 4868 The camera device post-processing stages can be manually controlled. 4869 The camera device supports basic manual control of the image post-processing 4870 stages. This means the following controls are guaranteed to be supported: 4871 4872 * Manual tonemap control 4873 * android.tonemap.curve 4874 * android.tonemap.mode 4875 * android.tonemap.maxCurvePoints 4876 * android.tonemap.gamma 4877 * android.tonemap.presetCurve 4878 4879 * Manual white balance control 4880 * android.colorCorrection.transform 4881 * android.colorCorrection.gains 4882 * Manual lens shading map control 4883 * android.shading.mode 4884 * android.statistics.lensShadingMapMode 4885 * android.statistics.lensShadingMap 4886 * android.lens.info.shadingMapSize 4887 * Manual aberration correction control (if aberration correction is supported) 4888 * android.colorCorrection.aberrationMode 4889 * android.colorCorrection.availableAberrationModes 4890 * Auto white balance lock 4891 * android.control.awbLock 4892 4893 If auto white balance is enabled, then the camera device 4894 will accurately report the values applied by AWB in the result. 
4895 4896 A given camera device may also support additional post-processing 4897 controls, but this capability only covers the above list of controls. 4898 </notes> 4899 </value> 4900 <value optional="true">RAW 4901 <notes> 4902 The camera device supports outputting RAW buffers and 4903 metadata for interpreting them. 4904 4905 Devices supporting the RAW capability allow both for 4906 saving DNG files, and for direct application processing of 4907 raw sensor images. 4908 4909 * RAW_SENSOR is supported as an output format. 4910 * The maximum available resolution for RAW_SENSOR streams 4911 will match either the value in 4912 android.sensor.info.pixelArraySize or 4913 android.sensor.info.preCorrectionActiveArraySize. 4914 * All DNG-related optional metadata entries are provided 4915 by the camera device. 4916 </notes> 4917 </value> 4918 <value optional="true" ndk_hidden="true">PRIVATE_REPROCESSING 4919 <notes> 4920 The camera device supports the Zero Shutter Lag reprocessing use case. 4921 4922 * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`. 4923 * {@link android.graphics.ImageFormat#PRIVATE} is supported as an output/input format, 4924 that is, {@link android.graphics.ImageFormat#PRIVATE} is included in the lists of 4925 formats returned by {@link 4926 android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and {@link 4927 android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}. 4928 * {@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput} 4929 returns non empty int[] for each supported input format returned by {@link 4930 android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}. 4931 * Each size returned by {@link 4932 android.hardware.camera2.params.StreamConfigurationMap#getInputSizes 4933 getInputSizes(ImageFormat.PRIVATE)} is also included in {@link 4934 android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes 4935 getOutputSizes(ImageFormat.PRIVATE)} 4936 * Using {@link android.graphics.ImageFormat#PRIVATE} does not cause a frame rate drop 4937 relative to the sensor's maximum capture rate (at that resolution). 4938 * {@link android.graphics.ImageFormat#PRIVATE} will be reprocessable into both 4939 {@link android.graphics.ImageFormat#YUV_420_888} and 4940 {@link android.graphics.ImageFormat#JPEG} formats. 4941 * The maximum available resolution for PRIVATE streams 4942 (both input/output) will match the maximum available 4943 resolution of JPEG streams. 4944 * Static metadata android.reprocess.maxCaptureStall. 4945 * Only below controls are effective for reprocessing requests and 4946 will be present in capture results, other controls in reprocess 4947 requests will be ignored by the camera device. 4948 * android.jpeg.* 4949 * android.noiseReduction.mode 4950 * android.edge.mode 4951 * android.noiseReduction.availableNoiseReductionModes and 4952 android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode. 4953 </notes> 4954 </value> 4955 <value optional="true">READ_SENSOR_SETTINGS 4956 <notes> 4957 The camera device supports accurately reporting the sensor settings for many of 4958 the sensor controls while the built-in 3A algorithm is running. This allows 4959 reporting of sensor settings even when these settings cannot be manually changed. 
4960 4961 The values reported for the following controls are guaranteed to be available 4962 in the CaptureResult, including when 3A is enabled: 4963 4964 * Exposure control 4965 * android.sensor.exposureTime 4966 * Sensitivity control 4967 * android.sensor.sensitivity 4968 * Lens controls (if the lens is adjustable) 4969 * android.lens.focusDistance 4970 * android.lens.aperture 4971 4972 This capability is a subset of the MANUAL_SENSOR control capability, and will 4973 always be included if the MANUAL_SENSOR capability is available. 4974 </notes> 4975 </value> 4976 <value optional="true">BURST_CAPTURE 4977 <notes> 4978 The camera device supports capturing high-resolution images at >= 20 frames per 4979 second, in at least the uncompressed YUV format, when post-processing settings are set 4980 to FAST. Additionally, maximum-resolution images can be captured at >= 10 frames 4981 per second. Here, 'high resolution' means at least 8 megapixels, or the maximum 4982 resolution of the device, whichever is smaller. 4983 </notes> 4984 <sdk_notes> 4985 More specifically, this means that a size matching the camera device's active array 4986 size is listed as a supported size for the {@link 4987 android.graphics.ImageFormat#YUV_420_888} format in either {@link 4988 android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} or {@link 4989 android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes}, 4990 with a minimum frame duration for that format and size of either <= 1/20 s, or 4991 <= 1/10 s, respectively; and the android.control.aeAvailableTargetFpsRanges entry 4992 lists at least one FPS range where the minimum FPS is >= 1 / minimumFrameDuration 4993 for the maximum-size YUV_420_888 format. If that maximum size is listed in {@link 4994 android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes}, 4995 then the list of resolutions for YUV_420_888 from {@link 4996 android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} contains at 4997 least one resolution >= 8 megapixels, with a minimum frame duration of <= 1/20 4998 s. 4999 5000 If the device supports the {@link 5001 android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10}, {@link 5002 android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12}, then those can also be 5003 captured at the same rate as the maximum-size YUV_420_888 resolution is. 5004 5005 If the device supports the PRIVATE_REPROCESSING capability, then the same guarantees 5006 as for the YUV_420_888 format also apply to the {@link 5007 android.graphics.ImageFormat#PRIVATE} format. 5008 5009 In addition, the android.sync.maxLatency field is guaranted to have a value between 0 5010 and 4, inclusive. android.control.aeLockAvailable and android.control.awbLockAvailable 5011 are also guaranteed to be `true` so burst capture with these two locks ON yields 5012 consistent image output. 5013 </sdk_notes> 5014 <ndk_notes> 5015 More specifically, this means that at least one output {@link 5016 android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888} size listed in 5017 {@link 5018 android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS} 5019 is larger or equal to the 'high resolution' defined above, and can be captured at at 5020 least 20 fps. 
For the largest {@link
5021 android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888} size listed in
5022 {@link
5023 android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS},
5024 the camera device can capture this size at at least 10 frames per second. Also the
5025 android.control.aeAvailableTargetFpsRanges entry lists at least one FPS range where
5026 the minimum FPS is >= 1 / minimumFrameDuration for the largest YUV_420_888 size.
5027
5028 If the device supports {@link
5029 android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10} or {@link
5030 android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12}, then those formats can also be
5031 captured at the same rate as the maximum-size YUV_420_888 resolution.
5032
5033 In addition, the android.sync.maxLatency field is guaranteed to have a value between 0
5034 and 4, inclusive. android.control.aeLockAvailable and android.control.awbLockAvailable
5035 are also guaranteed to be `true` so burst capture with these two locks ON yields
5036 consistent image output.
5037 </ndk_notes>
5038 </value>
5039 <value optional="true" ndk_hidden="true">YUV_REPROCESSING
5040 <notes>
5041 The camera device supports the YUV_420_888 reprocessing use case, similar to
5042 PRIVATE_REPROCESSING. This capability requires the camera device to support the
5043 following:
5044
5045 * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`.
5046 * {@link android.graphics.ImageFormat#YUV_420_888} is supported as an output/input
5047 format, that is, YUV_420_888 is included in the lists of formats returned by {@link
5048 android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and {@link
5049 android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}.
5050 * {@link
5051 android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput}
5052 returns non-empty int[] for each supported input format returned by {@link
5053 android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}.
5054 * Each size returned by {@link
5055 android.hardware.camera2.params.StreamConfigurationMap#getInputSizes
5056 getInputSizes(YUV_420_888)} is also included in {@link
5057 android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes
5058 getOutputSizes(YUV_420_888)}
5059 * Using {@link android.graphics.ImageFormat#YUV_420_888} does not cause a frame rate
5060 drop relative to the sensor's maximum capture rate (at that resolution).
5061 * {@link android.graphics.ImageFormat#YUV_420_888} will be reprocessable into both
5062 {@link android.graphics.ImageFormat#YUV_420_888} and {@link
5063 android.graphics.ImageFormat#JPEG} formats.
5064 * The maximum available resolution for {@link
5065 android.graphics.ImageFormat#YUV_420_888} streams (both input/output) will match the
5066 maximum available resolution of {@link android.graphics.ImageFormat#JPEG} streams.
5067 * Static metadata android.reprocess.maxCaptureStall.
5068 * Only the below controls are effective for reprocessing requests and will be present
5069 in capture results. The reprocess requests are from the original capture results
5070 that are associated with the intermediate {@link
5071 android.graphics.ImageFormat#YUV_420_888} output buffers. All other controls in the
5072 reprocess requests will be ignored by the camera device.
5073 * android.jpeg.* 5074 * android.noiseReduction.mode 5075 * android.edge.mode 5076 * android.reprocess.effectiveExposureFactor 5077 * android.noiseReduction.availableNoiseReductionModes and 5078 android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode. 5079 </notes> 5080 </value> 5081 <value optional="true">DEPTH_OUTPUT 5082 <notes> 5083 The camera device can produce depth measurements from its field of view. 5084 5085 This capability requires the camera device to support the following: 5086 5087 * {@link android.graphics.ImageFormat#DEPTH16|AIMAGE_FORMAT_DEPTH16} is supported as 5088 an output format. 5089 * {@link 5090 android.graphics.ImageFormat#DEPTH_POINT_CLOUD|AIMAGE_FORMAT_DEPTH_POINT_CLOUD} is 5091 optionally supported as an output format. 5092 * This camera device, and all camera devices with the same android.lens.facing, will 5093 list the following calibration metadata entries in both {@link 5094 android.hardware.camera2.CameraCharacteristics|ACameraManager_getCameraCharacteristics} 5095 and {@link 5096 android.hardware.camera2.CaptureResult|ACameraCaptureSession_captureCallback_result}: 5097 - android.lens.poseTranslation 5098 - android.lens.poseRotation 5099 - android.lens.intrinsicCalibration 5100 - android.lens.distortion 5101 * The android.depth.depthIsExclusive entry is listed by this device. 5102 * As of Android P, the android.lens.poseReference entry is listed by this device. 5103 * A LIMITED camera with only the DEPTH_OUTPUT capability does not have to support 5104 normal YUV_420_888, JPEG, and PRIV-format outputs. It only has to support the DEPTH16 5105 format. 5106 5107 Generally, depth output operates at a slower frame rate than standard color capture, 5108 so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that 5109 should be accounted for (see {@link 5110 android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration|ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS}). 5111 On a device that supports both depth and color-based output, to enable smooth preview, 5112 using a repeating burst is recommended, where a depth-output target is only included 5113 once every N frames, where N is the ratio between preview output rate and depth output 5114 rate, including depth stall time. 5115 </notes> 5116 </value> 5117 <value optional="true" ndk_hidden="true">CONSTRAINED_HIGH_SPEED_VIDEO 5118 <notes> 5119 The device supports constrained high speed video recording (frame rate >=120fps) use 5120 case. The camera device will support high speed capture session created by {@link 5121 android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}, which 5122 only accepts high speed request lists created by {@link 5123 android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}. 5124 5125 A camera device can still support high speed video streaming by advertising the high 5126 speed FPS ranges in android.control.aeAvailableTargetFpsRanges. For this case, all 5127 normal capture request per frame control and synchronization requirements will apply 5128 to the high speed fps ranges, the same as all other fps ranges. This capability 5129 describes the capability of a specialized operating mode with many limitations (see 5130 below), which is only targeted at high speed video recording. 5131 5132 The supported high speed video sizes and fps ranges are specified in {@link 5133 android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}. 
To get the desired output frame rates, the application is only allowed to select video 5135 size and FPS range combinations provided by {@link 5136 android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}. The 5137 FPS range can be controlled via android.control.aeTargetFpsRange. 5138 5139 In this capability, the camera device will override aeMode, awbMode, and afMode to 5140 ON, AUTO, and CONTINUOUS_VIDEO, respectively. All post-processing block mode 5141 controls will be overridden to be FAST. Therefore, no manual control of capture 5142 and post-processing parameters is possible. All other controls operate the 5143 same as when android.control.mode == AUTO. This means that all other 5144 android.control.* fields continue to work, such as 5145 5146 * android.control.aeTargetFpsRange 5147 * android.control.aeExposureCompensation 5148 * android.control.aeLock 5149 * android.control.awbLock 5150 * android.control.effectMode 5151 * android.control.aeRegions 5152 * android.control.afRegions 5153 * android.control.awbRegions 5154 * android.control.afTrigger 5155 * android.control.aePrecaptureTrigger 5156 5157 Outside of android.control.*, the following controls will work: 5158 5159 * android.flash.mode (TORCH mode only; automatic flash for still capture will not 5160 work since aeMode is ON) 5161 * android.lens.opticalStabilizationMode (if it is supported) 5162 * android.scaler.cropRegion 5163 * android.statistics.faceDetectMode (if it is supported) 5164 5165 For the high speed recording use case, the actual maximum supported frame rate may 5166 be lower than what the camera can output, depending on the destination Surfaces for 5167 the image data. For example, if the destination surface is from a video encoder, 5168 the application needs to check if the video encoder is capable of supporting the 5169 high frame rate for a given video size, or it will end up with a lower recording 5170 frame rate. If the destination surface is from a preview window, the actual preview frame 5171 rate will be bounded by the screen refresh rate. 5172 5173 The camera device will only support up to 2 simultaneous high speed output surfaces 5174 (preview and recording surfaces) in this mode. The above controls will be effective only 5175 if all of the below conditions are true: 5176 5177 * The application creates a camera capture session with no more than 2 surfaces via 5178 {@link 5179 android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}. Each 5180 targeted surface must be a preview surface (either from {@link 5181 android.view.SurfaceView} or {@link android.graphics.SurfaceTexture}) or a recording 5182 surface (either from {@link android.media.MediaRecorder#getSurface} or {@link 5183 android.media.MediaCodec#createInputSurface}). 5184 * The stream sizes are selected from the sizes reported by 5185 {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}. 5186 * The FPS ranges are selected from {@link 5187 android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}. 5188 5189 When the above conditions are NOT satisfied, 5190 {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession} 5191 will fail. 5192 5193 Switching to an FPS range that has a different maximum FPS may trigger some camera device 5194 reconfigurations, which may introduce extra latency. It is recommended that 5195 the application avoids unnecessary maximum target FPS changes as much as possible 5196 during high speed streaming.
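As a rough illustration of the flow described above, a constrained high speed session could be set up along these lines (a Java sketch for illustration only; `cameraDevice`, `characteristics`, `previewSurface`, `recorderSurface`, and `handler` are assumed to already exist, and error handling is omitted):

    StreamConfigurationMap configs =
        characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    Size videoSize = configs.getHighSpeedVideoSizes()[0];
    Range<Integer> fpsRange = configs.getHighSpeedVideoFpsRangesFor(videoSize)[0];

    List<Surface> outputs = Arrays.asList(previewSurface, recorderSurface);
    cameraDevice.createConstrainedHighSpeedCaptureSession(outputs,
        new CameraCaptureSession.StateCallback() {
          @Override
          public void onConfigured(CameraCaptureSession session) {
            try {
              CameraConstrainedHighSpeedCaptureSession highSpeedSession =
                  (CameraConstrainedHighSpeedCaptureSession) session;
              CaptureRequest.Builder builder =
                  cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
              builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
              builder.addTarget(previewSurface);
              builder.addTarget(recorderSurface);
              // Expand the single request into the per-frame burst required by the
              // constrained high speed session, then run it repeatedly.
              List<CaptureRequest> burst =
                  highSpeedSession.createHighSpeedRequestList(builder.build());
              highSpeedSession.setRepeatingBurst(burst, /*callback*/ null, handler);
            } catch (CameraAccessException e) {
              // A real application must handle this error.
            }
          }

          @Override
          public void onConfigureFailed(CameraCaptureSession session) { }
        }, handler);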
5197 </notes> 5198 </value> 5199 <value optional="true" hal_version="3.3" >MOTION_TRACKING 5200 <notes> 5201 The camera device supports the MOTION_TRACKING value for 5202 android.control.captureIntent, which limits maximum exposure time to 20 ms. 5203 5204 This limits the motion blur of capture images, resulting in better image tracking 5205 results for use cases such as image stabilization or augmented reality. 5206 </notes> 5207 </value> 5208 <value optional="true" hal_version="3.3">LOGICAL_MULTI_CAMERA 5209 <notes> 5210 The camera device is a logical camera backed by two or more physical cameras that are 5211 also exposed to the application. 5212 5213 Camera application shouldn't assume that there are at most 1 rear camera and 1 front 5214 camera in the system. For an application that switches between front and back cameras, 5215 the recommendation is to switch between the first rear camera and the first front 5216 camera in the list of supported camera devices. 5217 5218 This capability requires the camera device to support the following: 5219 5220 * This camera device must list the following static metadata entries in {@link 5221 android.hardware.camera2.CameraCharacteristics}: 5222 - android.logicalMultiCamera.physicalIds 5223 - android.logicalMultiCamera.sensorSyncType 5224 * The underlying physical cameras' static metadata must list the following entries, 5225 so that the application can correlate pixels from the physical streams: 5226 - android.lens.poseReference 5227 - android.lens.poseRotation 5228 - android.lens.poseTranslation 5229 - android.lens.intrinsicCalibration 5230 - android.lens.distortion 5231 * The SENSOR_INFO_TIMESTAMP_SOURCE of the logical device and physical devices must be 5232 the same. 5233 * The logical camera device must be LIMITED or higher device. 5234 5235 Both the logical camera device and its underlying physical devices support the 5236 mandatory stream combinations required for their device levels. 5237 5238 Additionally, for each guaranteed stream combination, the logical camera supports: 5239 5240 * For each guaranteed stream combination, the logical camera supports replacing one 5241 logical {@link android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888 YUV_420_888} 5242 or raw stream with two physical streams of the same size and format, each from a 5243 separate physical camera, given that the size and format are supported by both 5244 physical cameras. 5245 * If the logical camera doesn't advertise RAW capability, but the underlying physical 5246 cameras do, the logical camera will support guaranteed stream combinations for RAW 5247 capability, except that the RAW streams will be physical streams, each from a separate 5248 physical camera. This is usually the case when the physical cameras have different 5249 sensor sizes. 5250 5251 Using physical streams in place of a logical stream of the same size and format will 5252 not slow down the frame rate of the capture, as long as the minimum frame duration 5253 of the physical and logical streams are the same. 5254 </notes> 5255 </value> 5256 <value optional="true" hal_version="3.3" >MONOCHROME 5257 <notes> 5258 The camera device is a monochrome camera that doesn't contain a color filter array, 5259 and the pixel values on U and V planes are all 128. 
5260 </notes> 5261 </value> 5262 5263 </enum> 5264 <description>List of capabilities that this camera device 5265 advertises as fully supporting.</description> 5266 <details> 5267 A capability is a contract that the camera device makes in order 5268 to be able to satisfy one or more use cases. 5269 5270 Listing a capability guarantees that the whole set of features 5271 required to support a common use will all be available. 5272 5273 Using a subset of the functionality provided by an unsupported 5274 capability may be possible on a specific camera device implementation; 5275 to do this query each of android.request.availableRequestKeys, 5276 android.request.availableResultKeys, 5277 android.request.availableCharacteristicsKeys. 5278 5279 The following capabilities are guaranteed to be available on 5280 android.info.supportedHardwareLevel `==` FULL devices: 5281 5282 * MANUAL_SENSOR 5283 * MANUAL_POST_PROCESSING 5284 5285 Other capabilities may be available on either FULL or LIMITED 5286 devices, but the application should query this key to be sure. 5287 </details> 5288 <hal_details> 5289 Additional constraint details per-capability will be available 5290 in the Compatibility Test Suite. 5291 5292 Minimum baseline requirements required for the 5293 BACKWARD_COMPATIBLE capability are not explicitly listed. 5294 Instead refer to "BC" tags and the camera CTS tests in the 5295 android.hardware.camera2.cts package. 5296 5297 Listed controls that can be either request or result (e.g. 5298 android.sensor.exposureTime) must be available both in the 5299 request and the result in order to be considered to be 5300 capability-compliant. 5301 5302 For example, if the HAL claims to support MANUAL control, 5303 then exposure time must be configurable via the request _and_ 5304 the actual exposure applied must be available via 5305 the result. 5306 5307 If MANUAL_SENSOR is omitted, the HAL may choose to omit the 5308 android.scaler.availableMinFrameDurations static property entirely. 5309 5310 For PRIVATE_REPROCESSING and YUV_REPROCESSING capabilities, see 5311 hardware/libhardware/include/hardware/camera3.h Section 10 for more information. 5312 5313 Devices that support the MANUAL_SENSOR capability must support the 5314 CAMERA3_TEMPLATE_MANUAL template defined in camera3.h. 5315 5316 Devices that support the PRIVATE_REPROCESSING capability or the 5317 YUV_REPROCESSING capability must support the 5318 CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template defined in camera3.h. 5319 5320 For DEPTH_OUTPUT, the depth-format keys 5321 android.depth.availableDepthStreamConfigurations, 5322 android.depth.availableDepthMinFrameDurations, 5323 android.depth.availableDepthStallDurations must be available, in 5324 addition to the other keys explicitly mentioned in the DEPTH_OUTPUT 5325 enum notes. The entry android.depth.maxDepthSamples must be available 5326 if the DEPTH_POINT_CLOUD format is supported (HAL pixel format BLOB, dataspace 5327 DEPTH). 5328 5329 For a camera device with LOGICAL_MULTI_CAMERA capability, it should operate in the 5330 same way as a physical camera device based on its hardware level and capabilities. 5331 It's recommended that its feature set is superset of that of individual physical cameras. 
5332 5333 * In the camera1 API, to maintain application compatibility, for each {logical_camera_id, 5334 physical_camera_1_id, physical_camera_2_id, ...} combination, where logical_camera_id 5335 is composed of physical_camera_N_id, the camera framework will only advertise the one camera id 5336 (within the combination) that is frontmost in the camera id list published by the HAL. 5337 5338 * The camera HAL is strongly recommended to advertise the camera devices with the best feature, 5339 power, performance, and latency tradeoffs at the front of the camera id list. 5340 5341 For MONOCHROME, the camera device must also advertise the BACKWARD_COMPATIBLE capability, and 5342 it is exclusive of both the RAW and MANUAL_POST_PROCESSING capabilities: 5343 5344 * To maintain backward compatibility, the camera device must support all 5345 BACKWARD_COMPATIBLE required keys. The android.control.awbAvailableModes key only contains 5346 AUTO, and android.control.awbState is either CONVERGED or LOCKED depending on 5347 android.control.awbLock. 5348 5349 * A monochrome device doesn't need to advertise DNG-related optional metadata tags. 5350 5351 * android.colorCorrection.mode, android.colorCorrection.transform, and 5352 android.colorCorrection.gains are not applicable. So the camera device cannot 5353 be a FULL device. However, the HAL can still advertise other individual capabilities. 5354 5355 * If the device supports tonemap control, only android.tonemap.curveRed is used. 5356 curveGreen and curveBlue are no-ops. 5357 </hal_details> 5358 </entry> 5359 <entry name="availableRequestKeys" type="int32" visibility="ndk_public" 5360 container="array" hwlevel="legacy"> 5361 <array> 5362 <size>n</size> 5363 </array> 5364 <description>A list of all keys that the camera device has available 5365 to use with {@link android.hardware.camera2.CaptureRequest|ACaptureRequest}.</description> 5366 5367 <details>Attempting to set a key into a CaptureRequest that is not 5368 listed here will result in an invalid request and will be rejected 5369 by the camera device. 5370 5371 This field can be used to query the feature set of a camera device 5372 at a more granular level than capabilities. This is especially 5373 important for optional keys that are not listed under any capability 5374 in android.request.availableCapabilities. 5375 </details> 5376 <hal_details> 5377 Vendor tags can be listed here. Vendor tag metadata should also 5378 use the extensions C API (refer to camera3.h for more details). 5379 5380 Setting/getting vendor tags will be checked against the metadata 5381 vendor extensions API and not against this field. 5382 5383 The HAL must not consume any request tags that are not listed either 5384 here or in the vendor tag list. 5385 5386 The public camera2 API will always make the vendor tags visible 5387 via 5388 {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureRequestKeys}. 5389 </hal_details> 5390 </entry> 5391 <entry name="availableResultKeys" type="int32" visibility="ndk_public" 5392 container="array" hwlevel="legacy"> 5393 <array> 5394 <size>n</size> 5395 </array> 5396 <description>A list of all keys that the camera device has available to use with {@link 5397 android.hardware.camera2.CaptureResult|ACameraCaptureSession_captureCallback_result}. 5398 </description> 5399 5400 <details>Attempting to get a key from a CaptureResult that is not 5401 listed here will always return a `null` value. Getting a key from 5402 a CaptureResult that is listed here will generally never return a `null` 5403 value.
5404 5405 The following keys may return `null` unless they are enabled: 5406 5407 * android.statistics.lensShadingMap (non-null iff android.statistics.lensShadingMapMode == ON) 5408 5409 (Those sometimes-null keys will nevertheless be listed here 5410 if they are available.) 5411 5412 This field can be used to query the feature set of a camera device 5413 at a more granular level than capabilities. This is especially 5414 important for optional keys that are not listed under any capability 5415 in android.request.availableCapabilities. 5416 </details> 5417 <hal_details> 5418 Tags listed here must always have an entry in the result metadata, 5419 even if that size is 0 elements. Only array-type tags (e.g. lists, 5420 matrices, strings) are allowed to have 0 elements. 5421 5422 Vendor tags can be listed here. Vendor tag metadata should also 5423 use the extensions C api (refer to camera3.h for more details). 5424 5425 Setting/getting vendor tags will be checked against the metadata 5426 vendor extensions API and not against this field. 5427 5428 The HAL must not produce any result tags that are not listed either 5429 here or in the vendor tag list. 5430 5431 The public camera2 API will always make the vendor tags visible via {@link 5432 android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys}. 5433 </hal_details> 5434 </entry> 5435 <entry name="availableCharacteristicsKeys" type="int32" visibility="ndk_public" 5436 container="array" hwlevel="legacy"> 5437 <array> 5438 <size>n</size> 5439 </array> 5440 <description>A list of all keys that the camera device has available to use with {@link 5441 android.hardware.camera2.CameraCharacteristics|ACameraManager_getCameraCharacteristics}. 5442 </description> 5443 <details>This entry follows the same rules as 5444 android.request.availableResultKeys (except that it applies for 5445 CameraCharacteristics instead of CaptureResult). See above for more 5446 details. 5447 </details> 5448 <hal_details> 5449 Keys listed here must always have an entry in the static info metadata, 5450 even if that size is 0 elements. Only array-type tags (e.g. lists, 5451 matrices, strings) are allowed to have 0 elements. 5452 5453 Vendor tags can listed here. Vendor tag metadata should also use 5454 the extensions C api (refer to camera3.h for more details). 5455 5456 Setting/getting vendor tags will be checked against the metadata 5457 vendor extensions API and not against this field. 5458 5459 The HAL must not have any tags in its static info that are not listed 5460 either here or in the vendor tag list. 5461 5462 The public camera2 API will always make the vendor tags visible 5463 via {@link android.hardware.camera2.CameraCharacteristics#getKeys}. 5464 </hal_details> 5465 </entry> 5466 <entry name="availableSessionKeys" type="int32" visibility="ndk_public" 5467 container="array" hwlevel="legacy" hal_version="3.3"> 5468 <array> 5469 <size>n</size> 5470 </array> 5471 <description>A subset of the available request keys that the camera device 5472 can pass as part of the capture session initialization.</description> 5473 5474 <details> This is a subset of android.request.availableRequestKeys which 5475 contains a list of keys that are difficult to apply per-frame and 5476 can result in unexpected delays when modified during the capture session 5477 lifetime. Typical examples include parameters that require a 5478 time-consuming hardware re-configuration or internal camera pipeline 5479 change. 
For performance reasons, we advise clients to pass their initial 5480 values as part of 5481 {@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}. 5482 Once the camera capture session is enabled, it is also recommended to avoid 5483 changing them from their initial values set in 5484 {@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}. 5485 Control over session parameters can still be exerted in capture requests, 5486 but clients should be aware of and expect delays during their application. 5487 An example usage scenario could look like this: 5488 5489 * The camera client starts by querying the session parameter key list via 5490 {@link android.hardware.camera2.CameraCharacteristics#getAvailableSessionKeys|ACameraManager_getCameraCharacteristics}. 5491 * Before triggering the capture session create sequence, a capture request 5492 must be built via 5493 {@link CameraDevice#createCaptureRequest|ACameraDevice_createCaptureRequest} 5494 using an appropriate template matching the particular use case. 5495 * The client should go over the list of session parameters and check 5496 whether any of the keys listed match the parameters that 5497 they intend to modify as part of the first capture request. 5498 * If there is no such match, the capture request can be passed 5499 unmodified to 5500 {@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}. 5501 * If matches do exist, the client should update the respective values 5502 and pass the request to 5503 {@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}. 5504 * After the capture session initialization completes, the session parameter 5505 key list can continue to serve as a reference when posting or updating 5506 further requests. As mentioned above, further changes to session 5507 parameters should ideally be avoided; if updates are necessary, 5508 however, clients can expect a delay/glitch during the 5509 parameter switch. 5510 5511 </details> 5512 <hal_details> 5513 If android.control.aeTargetFpsRange is part of the session parameters and constrained high 5514 speed mode is enabled, then only modifications of the maximum framerate value will be 5515 monitored by the framework and can trigger camera re-configuration. For more information 5516 about framerate ranges during constrained high speed sessions see 5517 {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}. 5518 Vendor tags can be listed here. Vendor tag metadata should also 5519 use the extensions C API (refer to 5520 android.hardware.camera.device.V3_4.StreamConfiguration.sessionParams for more details). 5521 5522 Setting/getting vendor tags will be checked against the metadata 5523 vendor extensions API and not against this field. 5524 5525 The HAL must not consume any request tags in the session parameters that 5526 are not listed either here or in the vendor tag list. 5527 5528 The public camera2 API will always make the vendor tags visible 5529 via 5530 {@link android.hardware.camera2.CameraCharacteristics#getAvailableSessionKeys}.
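For reference, an application-side sketch of the usage scenario described in the details above (hypothetical names; `manager`, `cameraId`, `cameraDevice`, `outputConfigs`, `executor`, and `stateCallback` are assumed to already exist, and error handling is omitted) could look like:

    CameraCharacteristics chars = manager.getCameraCharacteristics(cameraId);
    List<CaptureRequest.Key<?>> sessionKeys = chars.getAvailableSessionKeys();

    CaptureRequest.Builder builder =
        cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    // ... set whatever values the first capture request needs ...
    CaptureRequest firstRequest = builder.build();

    // If any key in sessionKeys is among the values set above, its initial value is
    // carried by firstRequest; either way the request is passed as session parameters.
    SessionConfiguration config = new SessionConfiguration(
        SessionConfiguration.SESSION_REGULAR, outputConfigs, executor, stateCallback);
    config.setSessionParameters(firstRequest);
    cameraDevice.createCaptureSession(config);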
5531 </hal_details> 5532 </entry> 5533 <entry name="availablePhysicalCameraRequestKeys" type="int32" visibility="hidden" 5534 container="array" hwlevel="limited" hal_version="3.3"> 5535 <array> 5536 <size>n</size> 5537 </array> 5538 <description>A subset of the available request keys that can be overridden for 5539 physical devices backing a logical multi-camera.</description> 5540 <details> 5541 This is a subset of android.request.availableRequestKeys which contains a list 5542 of keys that can be overridden using {@link CaptureRequest.Builder#setPhysicalCameraKey}. 5543 The respective value of such a request key can be obtained by calling 5544 {@link CaptureRequest.Builder#getPhysicalCameraKey}. Capture requests that contain 5545 individual physical device requests must be built via 5546 {@link android.hardware.camera2.CameraDevice#createCaptureRequest(int, Set)}. 5547 </details> 5548 <hal_details> 5549 Vendor tags can be listed here. Vendor tag metadata should also 5550 use the extensions C API (refer to 5551 android.hardware.camera.device.V3_4.CaptureRequest.physicalCameraSettings for more 5552 details). 5553 5554 Setting/getting vendor tags will be checked against the metadata 5555 vendor extensions API and not against this field. 5556 5557 The HAL must not consume any request tags in the session parameters that 5558 are not listed either here or in the vendor tag list. 5559 5560 There should be no overlap between this set of keys and the available session keys 5561 {@link android.hardware.camera2.CameraCharacteristics#getAvailableSessionKeys}, or 5562 any other controls that can have an impact on the dual-camera sync. 5563 5564 The public camera2 API will always make the vendor tags visible 5565 via 5566 {@link android.hardware.camera2.CameraCharacteristics#getAvailablePhysicalCameraRequestKeys}. 5567 </hal_details> 5568 </entry> 5569 </static> 5570 </section> 5571 <section name="scaler"> 5572 <controls> 5573 <entry name="cropRegion" type="int32" visibility="public" 5574 container="array" typedef="rectangle" hwlevel="legacy"> 5575 <array> 5576 <size>4</size> 5577 </array> 5578 <description>The desired region of the sensor to read out for this capture.</description> 5579 <units>Pixel coordinates relative to 5580 android.sensor.info.activeArraySize</units> 5581 <details> 5582 This control can be used to implement digital zoom. 5583 5584 The crop region coordinate system is based off 5585 android.sensor.info.activeArraySize, with `(0, 0)` being the 5586 top-left corner of the sensor active array. 5587 5588 Output streams use this rectangle to produce their output, 5589 cropping to a smaller region if necessary to maintain the 5590 stream's aspect ratio, then scaling the sensor input to 5591 match the output's configured resolution. 5592 5593 The crop region is applied after the RAW to other color 5594 space (e.g. YUV) conversion. Since raw streams 5595 (e.g. RAW16) don't have the conversion stage, they are not 5596 croppable. The crop region will be ignored by raw streams. 5597 5598 For non-raw streams, any additional per-stream cropping will 5599 be done to maximize the final pixel area of the stream. 5600 5601 For example, if the crop region is set to a 4:3 aspect 5602 ratio, then 4:3 streams will use the exact crop 5603 region. 16:9 streams will further crop vertically 5604 (letterbox). 5605 5606 Conversely, if the crop region is set to a 16:9 aspect ratio, then 4:3 5607 outputs will crop horizontally (pillarbox), and 16:9 5608 streams will match exactly.
These additional crops will 5609 be centered within the crop region. 5610 5611 The width and height of the crop region cannot 5612 be set to be smaller than 5613 `floor( activeArraySize.width / android.scaler.availableMaxDigitalZoom )` and 5614 `floor( activeArraySize.height / android.scaler.availableMaxDigitalZoom )`, respectively. 5615 5616 The camera device may adjust the crop region to account 5617 for rounding and other hardware requirements; the final 5618 crop region used will be included in the output capture 5619 result. 5620 </details> 5621 <ndk_details> 5622 The data representation is int[4], which maps to (left, top, width, height). 5623 </ndk_details> 5624 <hal_details> 5625 The output streams must maintain square pixels at all 5626 times, no matter what the relative aspect ratios of the 5627 crop region and the stream are. Negative values for the 5628 corner are allowed for raw output if the full pixel array is 5629 larger than the active pixel array. Width and height may be 5630 rounded up to the nearest larger supportable width, especially 5631 for raw output, where only a few fixed scales may be 5632 possible. 5633 5634 For a configured set of output streams, if the sensor output is cropped to a smaller 5635 size than the active array size, the HAL needs to follow the cropping rules below: 5636 5637 * The HAL needs to handle the cropRegion as if the sensor crop size is the effective active 5638 array size. More specifically, the HAL must transform the request cropRegion from 5639 android.sensor.info.activeArraySize to the sensor cropped pixel area size in this way: 5640 1. Translate the requested cropRegion w.r.t. the top-left corner of the sensor 5641 cropped pixel area by (tx, ty), 5642 where `ty = sensorCrop.top * (sensorCrop.height / activeArraySize.height)` 5643 and `tx = sensorCrop.left * (sensorCrop.width / activeArraySize.width)`. Here, 5644 (sensorCrop.top, sensorCrop.left) is the coordinate based off 5645 android.sensor.info.activeArraySize. 5646 2. Scale the width and height of the requested cropRegion with scaling factors of 5647 sensorCrop.width/activeArraySize.width and sensorCrop.height/activeArraySize.height, 5648 respectively. 5649 Once this new cropRegion is calculated, the HAL must use this region to crop the image 5650 with regard to the sensor crop size (effective active array size). The HAL still needs 5651 to follow the general cropping rule for this new cropRegion and effective active 5652 array size. 5653 5654 * The HAL must report the cropRegion with regard to android.sensor.info.activeArraySize. 5655 The HAL needs to convert the new cropRegion generated above w.r.t. the full active array size. 5656 The reported cropRegion may be slightly different from the requested cropRegion since 5657 the HAL may adjust the crop region to account for rounding, conversion error, or other 5658 hardware limitations. 5659 5660 HAL2.x uses only (x, y, width). 5661 </hal_details> 5662 <tag id="BC" /> 5663 </entry> 5664 </controls> 5665 <static> 5666 <entry name="availableFormats" type="int32" 5667 visibility="hidden" deprecated="true" enum="true" 5668 container="array" typedef="imageFormat"> 5669 <array> 5670 <size>n</size> 5671 </array> 5672 <enum> 5673 <value optional="true" id="0x20">RAW16 5674 <notes> 5675 RAW16 is a standard, cross-platform format for raw image 5676 buffers with 16-bit pixels. 5677 5678 Buffers of this format are typically expected to have a 5679 Bayer Color Filter Array (CFA) layout, which is given in 5680 android.sensor.info.colorFilterArrangement.
Sensors with 5681 CFAs that are not representable by a format in 5682 android.sensor.info.colorFilterArrangement should not 5683 use this format. 5684 5685 Buffers of this format will also follow the constraints given for 5686 RAW_OPAQUE buffers, but with relaxed performance constraints. 5687 5688 This format is intended to give users access to the full contents 5689 of the buffers coming directly from the image sensor prior to any 5690 cropping or scaling operations, and all coordinate systems for 5691 metadata used for this format are relative to the size of the 5692 active region of the image sensor before any geometric distortion 5693 correction has been applied (i.e. 5694 android.sensor.info.preCorrectionActiveArraySize). Supported 5695 dimensions for this format are limited to the full dimensions of 5696 the sensor (e.g. either android.sensor.info.pixelArraySize or 5697 android.sensor.info.preCorrectionActiveArraySize will be the 5698 only supported output size). 5699 5700 See android.scaler.availableInputOutputFormatsMap for 5701 the full set of performance guarantees. 5702 </notes> 5703 </value> 5704 <value optional="true" id="0x24">RAW_OPAQUE 5705 <notes> 5706 RAW_OPAQUE (or 5707 {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE} 5708 as referred in public API) is a format for raw image buffers 5709 coming from an image sensor. 5710 5711 The actual structure of buffers of this format is 5712 platform-specific, but must follow several constraints: 5713 5714 1. No image post-processing operations may have been applied to 5715 buffers of this type. These buffers contain raw image data coming 5716 directly from the image sensor. 5717 1. If a buffer of this format is passed to the camera device for 5718 reprocessing, the resulting images will be identical to the images 5719 produced if the buffer had come directly from the sensor and was 5720 processed with the same settings. 5721 5722 The intended use for this format is to allow access to the native 5723 raw format buffers coming directly from the camera sensor without 5724 any additional conversions or decrease in framerate. 5725 5726 See android.scaler.availableInputOutputFormatsMap for the full set of 5727 performance guarantees. 5728 </notes> 5729 </value> 5730 <value optional="true" id="0x32315659">YV12 5731 <notes>YCrCb 4:2:0 Planar</notes> 5732 </value> 5733 <value optional="true" id="0x11">YCrCb_420_SP 5734 <notes>NV21</notes> 5735 </value> 5736 <value id="0x22">IMPLEMENTATION_DEFINED 5737 <notes>System internal format, not application-accessible</notes> 5738 </value> 5739 <value id="0x23">YCbCr_420_888 5740 <notes>Flexible YUV420 Format</notes> 5741 </value> 5742 <value id="0x21">BLOB 5743 <notes>JPEG format</notes> 5744 </value> 5745 </enum> 5746 <description>The list of image formats that are supported by this 5747 camera device for output streams.</description> 5748 <deprecation_description> 5749 Not used in HALv3 or newer 5750 </deprecation_description> 5751 <details> 5752 All camera devices will support JPEG and YUV_420_888 formats. 5753 5754 When set to YUV_420_888, application can access the YUV420 data directly. 5755 </details> 5756 <hal_details> 5757 These format values are from HAL_PIXEL_FORMAT_* in 5758 system/core/include/system/graphics.h. 5759 5760 When IMPLEMENTATION_DEFINED is used, the platform 5761 gralloc module will select a format based on the usage flags provided 5762 by the camera HAL device and the other endpoint of the stream. 
It is 5763 usually used by preview and recording streams, where the application doesn't 5764 need access the image data. 5765 5766 YCbCr_420_888 format must be supported by the HAL. When an image stream 5767 needs CPU/application direct access, this format will be used. 5768 5769 The BLOB format must be supported by the HAL. This is used for the JPEG stream. 5770 5771 A RAW_OPAQUE buffer should contain only pixel data. It is strongly 5772 recommended that any information used by the camera device when 5773 processing images is fully expressed by the result metadata 5774 for that image buffer. 5775 </hal_details> 5776 <tag id="BC" /> 5777 </entry> 5778 <entry name="availableJpegMinDurations" type="int64" visibility="hidden" deprecated="true" 5779 container="array"> 5780 <array> 5781 <size>n</size> 5782 </array> 5783 <description>The minimum frame duration that is supported 5784 for each resolution in android.scaler.availableJpegSizes. 5785 </description> 5786 <deprecation_description> 5787 Not used in HALv3 or newer 5788 </deprecation_description> 5789 <units>Nanoseconds</units> 5790 <range>TODO: Remove property.</range> 5791 <details> 5792 This corresponds to the minimum steady-state frame duration when only 5793 that JPEG stream is active and captured in a burst, with all 5794 processing (typically in android.*.mode) set to FAST. 5795 5796 When multiple streams are configured, the minimum 5797 frame duration will be &gt;= max(individual stream min 5798 durations)</details> 5799 <tag id="BC" /> 5800 </entry> 5801 <entry name="availableJpegSizes" type="int32" visibility="hidden" 5802 deprecated="true" container="array" typedef="size"> 5803 <array> 5804 <size>n</size> 5805 <size>2</size> 5806 </array> 5807 <description>The JPEG resolutions that are supported by this camera device.</description> 5808 <deprecation_description> 5809 Not used in HALv3 or newer 5810 </deprecation_description> 5811 <range>TODO: Remove property.</range> 5812 <details> 5813 The resolutions are listed as `(width, height)` pairs. All camera devices will support 5814 sensor maximum resolution (defined by android.sensor.info.activeArraySize). 5815 </details> 5816 <hal_details> 5817 The HAL must include sensor maximum resolution 5818 (defined by android.sensor.info.activeArraySize), 5819 and should include half/quarter of sensor maximum resolution. 5820 </hal_details> 5821 <tag id="BC" /> 5822 </entry> 5823 <entry name="availableMaxDigitalZoom" type="float" visibility="public" 5824 hwlevel="legacy"> 5825 <description>The maximum ratio between both active area width 5826 and crop region width, and active area height and 5827 crop region height, for android.scaler.cropRegion. 5828 </description> 5829 <units>Zoom scale factor</units> 5830 <range>&gt;=1</range> 5831 <details> 5832 This represents the maximum amount of zooming possible by 5833 the camera device, or equivalently, the minimum cropping 5834 window size. 5835 5836 Crop regions that have a width or height that is smaller 5837 than this ratio allows will be rounded up to the minimum 5838 allowed size by the camera device. 5839 </details> 5840 <tag id="BC" /> 5841 </entry> 5842 <entry name="availableProcessedMinDurations" type="int64" visibility="hidden" deprecated="true" 5843 container="array"> 5844 <array> 5845 <size>n</size> 5846 </array> 5847 <description>For each available processed output size (defined in 5848 android.scaler.availableProcessedSizes), this property lists the 5849 minimum supportable frame duration for that size. 
5850 </description> 5851 <deprecation_description> 5852 Not used in HALv3 or newer 5853 </deprecation_description> 5854 <units>Nanoseconds</units> 5855 <details> 5856 This should correspond to the frame duration when only that processed 5857 stream is active, with all processing (typically in android.*.mode) 5858 set to FAST. 5859 5860 When multiple streams are configured, the minimum frame duration will 5861 be &gt;= max(individual stream min durations). 5862 </details> 5863 <tag id="BC" /> 5864 </entry> 5865 <entry name="availableProcessedSizes" type="int32" visibility="hidden" 5866 deprecated="true" container="array" typedef="size"> 5867 <array> 5868 <size>n</size> 5869 <size>2</size> 5870 </array> 5871 <description>The resolutions available for use with 5872 processed output streams, such as YV12, NV12, and 5873 platform opaque YUV/RGB streams to the GPU or video 5874 encoders.</description> 5875 <deprecation_description> 5876 Not used in HALv3 or newer 5877 </deprecation_description> 5878 <details> 5879 The resolutions are listed as `(width, height)` pairs. 5880 5881 For a given use case, the actual maximum supported resolution 5882 may be lower than what is listed here, depending on the destination 5883 Surface for the image data. For example, for recording video, 5884 the video encoder chosen may have a maximum size limit (e.g. 1080p) 5885 smaller than what the camera (e.g. maximum resolution is 3264x2448) 5886 can provide. 5887 5888 Please reference the documentation for the image data destination to 5889 check if it limits the maximum size for image data. 5890 </details> 5891 <hal_details> 5892 For FULL capability devices (`android.info.supportedHardwareLevel == FULL`), 5893 the HAL must include all JPEG sizes listed in android.scaler.availableJpegSizes 5894 and each below resolution if it is smaller than or equal to the sensor 5895 maximum resolution (if they are not listed in JPEG sizes already): 5896 5897 * 240p (320 x 240) 5898 * 480p (640 x 480) 5899 * 720p (1280 x 720) 5900 * 1080p (1920 x 1080) 5901 5902 For LIMITED capability devices (`android.info.supportedHardwareLevel == LIMITED`), 5903 the HAL only has to list up to the maximum video size supported by the devices. 5904 </hal_details> 5905 <tag id="BC" /> 5906 </entry> 5907 <entry name="availableRawMinDurations" type="int64" deprecated="true" 5908 container="array"> 5909 <array> 5910 <size>n</size> 5911 </array> 5912 <description> 5913 For each available raw output size (defined in 5914 android.scaler.availableRawSizes), this property lists the minimum 5915 supportable frame duration for that size. 5916 </description> 5917 <deprecation_description> 5918 Not used in HALv3 or newer 5919 </deprecation_description> 5920 <units>Nanoseconds</units> 5921 <details> 5922 Should correspond to the frame duration when only the raw stream is 5923 active. 
5924 5925 When multiple streams are configured, the minimum 5926 frame duration will be &gt;= max(individual stream min 5927 durations)</details> 5928 <tag id="BC" /> 5929 </entry> 5930 <entry name="availableRawSizes" type="int32" deprecated="true" 5931 container="array" typedef="size"> 5932 <array> 5933 <size>n</size> 5934 <size>2</size> 5935 </array> 5936 <description>The resolutions available for use with raw 5937 sensor output streams, listed as width, 5938 height</description> 5939 <deprecation_description> 5940 Not used in HALv3 or newer 5941 </deprecation_description> 5942 </entry> 5943 </static> 5944 <dynamic> 5945 <clone entry="android.scaler.cropRegion" kind="controls"> 5946 </clone> 5947 </dynamic> 5948 <static> 5949 <entry name="availableInputOutputFormatsMap" type="int32" visibility="hidden" 5950 typedef="reprocessFormatsMap"> 5951 <description>The mapping of image formats that are supported by this 5952 camera device for input streams, to their corresponding output formats. 5953 </description> 5954 <details> 5955 All camera devices with at least 1 5956 android.request.maxNumInputStreams will have at least one 5957 available input format. 5958 5959 The camera device will support the following map of formats, 5960 if its dependent capability (android.request.availableCapabilities) is supported: 5961 5962 Input Format | Output Format | Capability 5963 :-------------------------------------------------|:--------------------------------------------------|:---------- 5964 {@link android.graphics.ImageFormat#PRIVATE} | {@link android.graphics.ImageFormat#JPEG} | PRIVATE_REPROCESSING 5965 {@link android.graphics.ImageFormat#PRIVATE} | {@link android.graphics.ImageFormat#YUV_420_888} | PRIVATE_REPROCESSING 5966 {@link android.graphics.ImageFormat#YUV_420_888} | {@link android.graphics.ImageFormat#JPEG} | YUV_REPROCESSING 5967 {@link android.graphics.ImageFormat#YUV_420_888} | {@link android.graphics.ImageFormat#YUV_420_888} | YUV_REPROCESSING 5968 5969 PRIVATE refers to a device-internal format that is not directly application-visible. A 5970 PRIVATE input surface can be acquired by {@link android.media.ImageReader#newInstance} 5971 with {@link android.graphics.ImageFormat#PRIVATE} as the format. 5972 5973 For a PRIVATE_REPROCESSING-capable camera device, using the PRIVATE format as either input 5974 or output will never hurt maximum frame rate (i.e. {@link 5975 android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration 5976 getOutputStallDuration(ImageFormat.PRIVATE, size)} is always 0), 5977 5978 Attempting to configure an input stream with output streams not 5979 listed as available in this map is not valid. 5980 </details> 5981 <hal_details> 5982 For the formats, see `system/core/include/system/graphics.h` for a definition 5983 of the image format enumerations. The PRIVATE format refers to the 5984 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED format. The HAL could determine 5985 the actual format by using the gralloc usage flags. 5986 For ZSL use case in particular, the HAL could choose appropriate format (partially 5987 processed YUV or RAW based format) by checking the format and GRALLOC_USAGE_HW_CAMERA_ZSL. 5988 See camera3.h for more details. 5989 5990 This value is encoded as a variable-size array-of-arrays. 5991 The inner array always contains `[format, length, ...]` where 5992 `...` has `length` elements. An inner array is followed by another 5993 inner array if the total metadata entry size hasn't yet been exceeded. 
5994 5995 A code sample to read/write this encoding (with a device that 5996 supports reprocessing IMPLEMENTATION_DEFINED to YUV_420_888 and JPEG, 5997 and reprocessing YUV_420_888 to YUV_420_888 and JPEG): 5998 5999 // reading: each inner array is [input format, count N, then N output formats] 6000 int32_t* contents = &entry.i32[0]; 6001 for (size_t i = 0; i < entry.count; ) { 6002 int32_t format = contents[i++]; 6003 int32_t length = contents[i++]; 6004 int32_t output_formats[length]; 6005 memcpy(&output_formats[0], &contents[i], 6006 length * sizeof(int32_t)); 6007 i += length; 6008 } 6009 6010 // writing (static example, PRIVATE_REPROCESSING + YUV_REPROCESSING) 6011 int32_t contents[] = { 6012 IMPLEMENTATION_DEFINED, 2, YUV_420_888, BLOB, 6013 YUV_420_888, 2, YUV_420_888, BLOB, 6014 }; 6015 update_camera_metadata_entry(metadata, index, &contents[0], 6016 sizeof(contents)/sizeof(contents[0]), &updated_entry); 6017 6018 If the HAL claims to support any of the capabilities listed in the 6019 above details, then it must also support all the input-output 6020 combinations listed for that capability. It can optionally support 6021 additional formats if it so chooses. 6022 </hal_details> 6023 <tag id="REPROC" /> 6024 </entry> 6025 <entry name="availableStreamConfigurations" type="int32" visibility="ndk_public" 6026 enum="true" container="array" typedef="streamConfiguration" hwlevel="legacy"> 6027 <array> 6028 <size>n</size> 6029 <size>4</size> 6030 </array> 6031 <enum> 6032 <value>OUTPUT</value> 6033 <value>INPUT</value> 6034 </enum> 6035 <description>The available stream configurations that this 6036 camera device supports 6037 (i.e. format, width, height, output/input stream). 6038 </description> 6039 <details> 6040 The configurations are listed as `(format, width, height, input?)` 6041 tuples. 6042 6043 For a given use case, the actual maximum supported resolution 6044 may be lower than what is listed here, depending on the destination 6045 Surface for the image data. For example, for recording video, 6046 the video encoder chosen may have a maximum size limit (e.g. 1080p) 6047 smaller than what the camera (e.g. maximum resolution is 3264x2448) 6048 can provide. 6049 6050 Please reference the documentation for the image data destination to 6051 check if it limits the maximum size for image data. 6052 6053 Not all output formats may be supported in a configuration with 6054 an input stream of a particular format. For more details, see 6055 android.scaler.availableInputOutputFormatsMap. 6056 6057 The following table describes the minimum required output stream 6058 configurations based on the hardware level 6059 (android.info.supportedHardwareLevel): 6060 6061 Format | Size | Hardware Level | Notes 6062 :-------------:|:--------------------------------------------:|:--------------:|:--------------: 6063 JPEG | android.sensor.info.activeArraySize | Any | 6064 JPEG | 1920x1080 (1080p) | Any | if 1080p <= activeArraySize 6065 JPEG | 1280x720 (720p) | Any | if 720p <= activeArraySize 6066 JPEG | 640x480 (480p) | Any | if 480p <= activeArraySize 6067 JPEG | 320x240 (240p) | Any | if 240p <= activeArraySize 6068 YUV_420_888 | all output sizes available for JPEG | FULL | 6069 YUV_420_888 | all output sizes available for JPEG, up to the maximum video size | LIMITED | 6070 IMPLEMENTATION_DEFINED | same as YUV_420_888 | Any | 6071 6072 Refer to android.request.availableCapabilities for additional 6073 mandatory stream configurations on a per-capability basis.
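In the Java API, this information is exposed through {@link android.hardware.camera2.params.StreamConfigurationMap}; a minimal sketch of querying it (assuming a `characteristics` object has already been obtained from CameraManager) might look like:

    StreamConfigurationMap map =
        characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

    // Output sizes for a flexible YUV stream and for JPEG.
    Size[] yuvSizes = map.getOutputSizes(ImageFormat.YUV_420_888);
    Size[] jpegSizes = map.getOutputSizes(ImageFormat.JPEG);

    // Output sizes usable with a SurfaceTexture-based preview.
    Size[] previewSizes = map.getOutputSizes(SurfaceTexture.class);

    // Input formats; only non-empty on reprocessing-capable devices.
    int[] inputFormats = map.getInputFormats();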
6074 </details> 6075 <hal_details> 6076 It is recommended (but not mandatory) to also include half/quarter 6077 of sensor maximum resolution for JPEG formats (regardless of hardware 6078 level). 6079 6080 (The following is a rewording of the above required table): 6081 6082 For JPEG format, the sizes may be restricted by below conditions: 6083 6084 * The HAL may choose the aspect ratio of each Jpeg size to be one of well known ones 6085 (e.g. 4:3, 16:9, 3:2 etc.). If the sensor maximum resolution 6086 (defined by android.sensor.info.activeArraySize) has an aspect ratio other than these, 6087 it does not have to be included in the supported JPEG sizes. 6088 * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as 6089 the dimensions being a multiple of 16. 6090 6091 Therefore, the maximum JPEG size may be smaller than sensor maximum resolution. 6092 However, the largest JPEG size must be as close as possible to the sensor maximum 6093 resolution given above constraints. It is required that after aspect ratio adjustments, 6094 additional size reduction due to other issues must be less than 3% in area. For example, 6095 if the sensor maximum resolution is 3280x2464, if the maximum JPEG size has aspect 6096 ratio 4:3, the JPEG encoder alignment requirement is 16, the maximum JPEG size will be 6097 3264x2448. 6098 6099 For FULL capability devices (`android.info.supportedHardwareLevel == FULL`), 6100 the HAL must include all YUV_420_888 sizes that have JPEG sizes listed 6101 here as output streams. 6102 6103 It must also include each below resolution if it is smaller than or 6104 equal to the sensor maximum resolution (for both YUV_420_888 and JPEG 6105 formats), as output streams: 6106 6107 * 240p (320 x 240) 6108 * 480p (640 x 480) 6109 * 720p (1280 x 720) 6110 * 1080p (1920 x 1080) 6111 6112 For LIMITED capability devices 6113 (`android.info.supportedHardwareLevel == LIMITED`), 6114 the HAL only has to list up to the maximum video size 6115 supported by the device. 6116 6117 Regardless of hardware level, every output resolution available for 6118 YUV_420_888 must also be available for IMPLEMENTATION_DEFINED. 6119 6120 This supercedes the following fields, which are now deprecated: 6121 6122 * availableFormats 6123 * available[Processed,Raw,Jpeg]Sizes 6124 </hal_details> 6125 </entry> 6126 <entry name="availableMinFrameDurations" type="int64" visibility="ndk_public" 6127 container="array" typedef="streamConfigurationDuration" hwlevel="legacy"> 6128 <array> 6129 <size>4</size> 6130 <size>n</size> 6131 </array> 6132 <description>This lists the minimum frame duration for each 6133 format/size combination. 6134 </description> 6135 <units>(format, width, height, ns) x n</units> 6136 <details> 6137 This should correspond to the frame duration when only that 6138 stream is active, with all processing (typically in android.*.mode) 6139 set to either OFF or FAST. 6140 6141 When multiple streams are used in a request, the minimum frame 6142 duration will be max(individual stream min durations). 6143 6144 The minimum frame duration of a stream (of a particular format, size) 6145 is the same regardless of whether the stream is input or output. 6146 6147 See android.sensor.frameDuration and 6148 android.scaler.availableStallDurations for more details about 6149 calculating the max frame rate. 
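As a concrete illustration of the rule above (a sketch only; `characteristics`, `yuvSize`, and `jpegSize` are assumed to be already chosen), the best-case frame rate for a request targeting both a YUV stream and a JPEG stream can be estimated as:

    StreamConfigurationMap map =
        characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    long yuvMinNs = map.getOutputMinFrameDuration(ImageFormat.YUV_420_888, yuvSize);
    long jpegMinNs = map.getOutputMinFrameDuration(ImageFormat.JPEG, jpegSize);

    // The request's minimum frame duration is the max of the per-stream minimums;
    // stall durations (see android.scaler.availableStallDurations) are extra.
    long requestMinNs = Math.max(yuvMinNs, jpegMinNs);
    double maxFps = 1e9 / requestMinNs;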
6150 </details> 6151 <tag id="V1" /> 6152 </entry> 6153 <entry name="availableStallDurations" type="int64" visibility="ndk_public" 6154 container="array" typedef="streamConfigurationDuration" hwlevel="legacy"> 6155 <array> 6156 <size>4</size> 6157 <size>n</size> 6158 </array> 6159 <description>This lists the maximum stall duration for each 6160 output format/size combination. 6161 </description> 6162 <units>(format, width, height, ns) x n</units> 6163 <details> 6164 A stall duration is how much extra time would get added 6165 to the normal minimum frame duration for a repeating request 6166 that has streams with non-zero stall. 6167 6168 For example, consider JPEG captures which have the following 6169 characteristics: 6170 6171 * JPEG streams act like processed YUV streams in requests for which 6172 they are not included; in requests in which they are directly 6173 referenced, they act as JPEG streams. This is because supporting a 6174 JPEG stream requires the underlying YUV data to always be ready for 6175 use by a JPEG encoder, but the encoder will only be used (and impact 6176 frame duration) on requests that actually reference a JPEG stream. 6177 * The JPEG processor can run concurrently to the rest of the camera 6178 pipeline, but cannot process more than 1 capture at a time. 6179 6180 In other words, using a repeating YUV request would result 6181 in a steady frame rate (let's say it's 30 FPS). If a single 6182 JPEG request is submitted periodically, the frame rate will stay 6183 at 30 FPS (as long as we wait for the previous JPEG to return each 6184 time). If we try to submit a repeating YUV + JPEG request, then 6185 the frame rate will drop from 30 FPS. 6186 6187 In general, submitting a new request with a non-0 stall time 6188 stream will _not_ cause a frame rate drop unless there are still 6189 outstanding buffers for that stream from previous requests. 6190 6191 Submitting a repeating request with streams (call this `S`) 6192 is the same as setting the minimum frame duration from 6193 the normal minimum frame duration corresponding to `S`, added with 6194 the maximum stall duration for `S`. 6195 6196 If interleaving requests with and without a stall duration, 6197 a request will stall by the maximum of the remaining times 6198 for each can-stall stream with outstanding buffers. 6199 6200 This means that a stalling request will not have an exposure start 6201 until the stall has completed. 6202 6203 This should correspond to the stall duration when only that stream is 6204 active, with all processing (typically in android.*.mode) set to FAST 6205 or OFF. Setting any of the processing modes to HIGH_QUALITY 6206 effectively results in an indeterminate stall duration for all 6207 streams in a request (the regular stall calculation rules are 6208 ignored). 6209 6210 The following formats may always have a stall duration: 6211 6212 * {@link android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG} 6213 * {@link android.graphics.ImageFormat#RAW_SENSOR|AIMAGE_FORMAT_RAW16} 6214 6215 The following formats will never have a stall duration: 6216 6217 * {@link android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888} 6218 * {@link android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10} 6219 * {@link android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12} 6220 6221 All other formats may or may not have an allowed stall duration on 6222 a per-capability basis; refer to android.request.availableCapabilities 6223 for more details. 
6224 6225 See android.sensor.frameDuration for more information about 6226 calculating the max frame rate (absent stalls). 6227 </details> 6228 <hal_details> 6229 If possible, it is recommended that all non-JPEG formats 6230 (such as RAW16) should not have a stall duration. RAW10, RAW12, RAW_OPAQUE 6231 and IMPLEMENTATION_DEFINED must not have stall durations. 6232 </hal_details> 6233 <tag id="V1" /> 6234 </entry> 6235 <entry name="streamConfigurationMap" type="int32" visibility="java_public" 6236 synthetic="true" typedef="streamConfigurationMap" 6237 hwlevel="legacy"> 6238 <description>The available stream configurations that this 6239 camera device supports; also includes the minimum frame durations 6240 and the stall durations for each format/size combination. 6241 </description> 6242 <details> 6243 All camera devices will support sensor maximum resolution (defined by 6244 android.sensor.info.activeArraySize) for the JPEG format. 6245 6246 For a given use case, the actual maximum supported resolution 6247 may be lower than what is listed here, depending on the destination 6248 Surface for the image data. For example, for recording video, 6249 the video encoder chosen may have a maximum size limit (e.g. 1080p) 6250 smaller than what the camera (e.g. maximum resolution is 3264x2448) 6251 can provide. 6252 6253 Please reference the documentation for the image data destination to 6254 check if it limits the maximum size for image data. 6255 6256 The following table describes the minimum required output stream 6257 configurations based on the hardware level 6258 (android.info.supportedHardwareLevel): 6259 6260 Format | Size | Hardware Level | Notes 6261 :-------------------------------------------------:|:--------------------------------------------:|:--------------:|:--------------: 6262 {@link android.graphics.ImageFormat#JPEG} | android.sensor.info.activeArraySize (*1) | Any | 6263 {@link android.graphics.ImageFormat#JPEG} | 1920x1080 (1080p) | Any | if 1080p <= activeArraySize 6264 {@link android.graphics.ImageFormat#JPEG} | 1280x720 (720p) | Any | if 720p <= activeArraySize 6265 {@link android.graphics.ImageFormat#JPEG} | 640x480 (480p) | Any | if 480p <= activeArraySize 6266 {@link android.graphics.ImageFormat#JPEG} | 320x240 (240p) | Any | if 240p <= activeArraySize 6267 {@link android.graphics.ImageFormat#YUV_420_888} | all output sizes available for JPEG | FULL | 6268 {@link android.graphics.ImageFormat#YUV_420_888} | all output sizes available for JPEG, up to the maximum video size | LIMITED | 6269 {@link android.graphics.ImageFormat#PRIVATE} | same as YUV_420_888 | Any | 6270 6271 Refer to android.request.availableCapabilities and {@link 6272 android.hardware.camera2.CameraDevice#createCaptureSession} for additional mandatory 6273 stream configurations on a per-capability basis. 6274 6275 *1: For JPEG format, the sizes may be restricted by below conditions: 6276 6277 * The HAL may choose the aspect ratio of each Jpeg size to be one of well known ones 6278 (e.g. 4:3, 16:9, 3:2 etc.). If the sensor maximum resolution 6279 (defined by android.sensor.info.activeArraySize) has an aspect ratio other than these, 6280 it does not have to be included in the supported JPEG sizes. 6281 * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as 6282 the dimensions being a multiple of 16. 6283 Therefore, the maximum JPEG size may be smaller than sensor maximum resolution. 
6284 However, the largest JPEG size will be as close as possible to the sensor maximum 6285 resolution given above constraints. It is required that after aspect ratio adjustments, 6286 additional size reduction due to other issues must be less than 3% in area. For example, 6287 if the sensor maximum resolution is 3280x2464, if the maximum JPEG size has aspect 6288 ratio 4:3, and the JPEG encoder alignment requirement is 16, the maximum JPEG size will be 6289 3264x2448. 6290 </details> 6291 <hal_details> 6292 Do not set this property directly 6293 (it is synthetic and will not be available at the HAL layer); 6294 set the android.scaler.availableStreamConfigurations instead. 6295 6296 Not all output formats may be supported in a configuration with 6297 an input stream of a particular format. For more details, see 6298 android.scaler.availableInputOutputFormatsMap. 6299 6300 It is recommended (but not mandatory) to also include half/quarter 6301 of sensor maximum resolution for JPEG formats (regardless of hardware 6302 level). 6303 6304 (The following is a rewording of the above required table): 6305 6306 The HAL must include sensor maximum resolution (defined by 6307 android.sensor.info.activeArraySize). 6308 6309 For FULL capability devices (`android.info.supportedHardwareLevel == FULL`), 6310 the HAL must include all YUV_420_888 sizes that have JPEG sizes listed 6311 here as output streams. 6312 6313 It must also include each below resolution if it is smaller than or 6314 equal to the sensor maximum resolution (for both YUV_420_888 and JPEG 6315 formats), as output streams: 6316 6317 * 240p (320 x 240) 6318 * 480p (640 x 480) 6319 * 720p (1280 x 720) 6320 * 1080p (1920 x 1080) 6321 6322 For LIMITED capability devices 6323 (`android.info.supportedHardwareLevel == LIMITED`), 6324 the HAL only has to list up to the maximum video size 6325 supported by the device. 6326 6327 Regardless of hardware level, every output resolution available for 6328 YUV_420_888 must also be available for IMPLEMENTATION_DEFINED. 6329 6330 This supercedes the following fields, which are now deprecated: 6331 6332 * availableFormats 6333 * available[Processed,Raw,Jpeg]Sizes 6334 </hal_details> 6335 </entry> 6336 <entry name="croppingType" type="byte" visibility="public" enum="true" 6337 hwlevel="legacy"> 6338 <enum> 6339 <value>CENTER_ONLY 6340 <notes> 6341 The camera device only supports centered crop regions. 6342 </notes> 6343 </value> 6344 <value>FREEFORM 6345 <notes> 6346 The camera device supports arbitrarily chosen crop regions. 6347 </notes> 6348 </value> 6349 </enum> 6350 <description>The crop type that this camera device supports.</description> 6351 <details> 6352 When passing a non-centered crop region (android.scaler.cropRegion) to a camera 6353 device that only supports CENTER_ONLY cropping, the camera device will move the 6354 crop region to the center of the sensor active array (android.sensor.info.activeArraySize) 6355 and keep the crop region width and height unchanged. The camera device will return the 6356 final used crop region in metadata result android.scaler.cropRegion. 6357 6358 Camera devices that support FREEFORM cropping will support any crop region that 6359 is inside of the active array. The camera device will apply the same crop region and 6360 return the final used crop region in capture result metadata android.scaler.cropRegion. 6361 6362 LEGACY capability devices will only support CENTER_ONLY cropping. 
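For example, an application that wants its digital zoom to work on both CENTER_ONLY and FREEFORM devices can always compute a centered crop region (a sketch assuming `characteristics`, `requestBuilder`, and `desiredZoom` already exist):

    Rect activeArray =
        characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    float maxZoom =
        characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
    float zoom = Math.min(desiredZoom, maxZoom);

    // Shrink the active array by the zoom factor and center the result.
    int cropW = (int) Math.floor(activeArray.width() / zoom);
    int cropH = (int) Math.floor(activeArray.height() / zoom);
    int left = (activeArray.width() - cropW) / 2;
    int top = (activeArray.height() - cropH) / 2;
    requestBuilder.set(CaptureRequest.SCALER_CROP_REGION,
        new Rect(left, top, left + cropW, top + cropH));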
6363 </details> 6364 </entry> 6365 </static> 6366 </section> 6367 <section name="sensor"> 6368 <controls> 6369 <entry name="exposureTime" type="int64" visibility="public" hwlevel="full"> 6370 <description>Duration each pixel is exposed to 6371 light.</description> 6372 <units>Nanoseconds</units> 6373 <range>android.sensor.info.exposureTimeRange</range> 6374 <details>If the sensor can't expose this exact duration, it will shorten the 6375 duration exposed to the nearest possible value (rather than expose longer). 6376 The final exposure time used will be available in the output capture result. 6377 6378 This control is only effective if android.control.aeMode or android.control.mode is set to 6379 OFF; otherwise the auto-exposure algorithm will override this value. 6380 </details> 6381 <tag id="V1" /> 6382 </entry> 6383 <entry name="frameDuration" type="int64" visibility="public" hwlevel="full"> 6384 <description>Duration from start of frame exposure to 6385 start of next frame exposure.</description> 6386 <units>Nanoseconds</units> 6387 <range>See android.sensor.info.maxFrameDuration, {@link 6388 android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}. 6389 The duration is capped to `max(duration, exposureTime + overhead)`.</range> 6390 <details> 6391 The maximum frame rate that can be supported by a camera subsystem is 6392 a function of many factors: 6393 6394 * Requested resolutions of output image streams 6395 * Availability of binning / skipping modes on the imager 6396 * The bandwidth of the imager interface 6397 * The bandwidth of the various ISP processing blocks 6398 6399 Since these factors can vary greatly between different ISPs and 6400 sensors, the camera abstraction tries to represent the bandwidth 6401 restrictions with as simple a model as possible. 6402 6403 The model presented has the following characteristics: 6404 6405 * The image sensor is always configured to output the smallest 6406 resolution possible given the application's requested output stream 6407 sizes. The smallest resolution is defined as being at least as large 6408 as the largest requested output stream size; the camera pipeline must 6409 never digitally upsample sensor data when the crop region covers the 6410 whole sensor. In general, this means that if only small output stream 6411 resolutions are configured, the sensor can provide a higher frame 6412 rate. 6413 * Since any request may use any or all the currently configured 6414 output streams, the sensor and ISP must be configured to support 6415 scaling a single capture to all the streams at the same time. This 6416 means the camera pipeline must be ready to produce the largest 6417 requested output size without any delay. Therefore, the overall 6418 frame rate of a given configured stream set is governed only by the 6419 largest requested stream resolution. 6420 * Using more than one output stream in a request does not affect the 6421 frame duration. 6422 * Certain format-streams may need to do additional background processing 6423 before data is consumed/produced by that stream. These processors 6424 can run concurrently to the rest of the camera pipeline, but 6425 cannot process more than 1 capture at a time. 6426 6427 The necessary information for the application, given the model above, is provided via 6428 {@link 6429 android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}. 
6430 These are used to determine the maximum frame rate / minimum frame duration that is 6431 possible for a given stream configuration. 6432 6433 Specifically, the application can use the following rules to 6434 determine the minimum frame duration it can request from the camera 6435 device: 6436 6437 1. Let the set of currently configured input/output streams be called `S`. 6438 1. Find the minimum frame durations for each stream in `S`, by looking it up in {@link 6439 android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS} 6440 (with its respective size/format). Let this set of frame durations be called `F`. 6441 1. For any given request `R`, the minimum frame duration allowed for `R` is the maximum 6442 out of all values in `F`. Let the streams used in `R` be called `S_r`. 6443 6444 If none of the streams in `S_r` have a stall time (listed in {@link 6445 android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration|ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS} 6446 using its respective size/format), then the frame duration in `F` determines the steady 6447 state frame rate that the application will get if it uses `R` as a repeating request. Let 6448 this special kind of request be called `Rsimple`. 6449 6450 A repeating request `Rsimple` can be _occasionally_ interleaved by a single capture of a 6451 new request `Rstall` (which has at least one in-use stream with a non-0 stall time), and if 6452 `Rstall` has the same minimum frame duration, this will not cause a frame rate loss if all 6453 buffers from the previous `Rstall` have already been delivered. 6454 6455 For more details about stalling, see {@link 6456 android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration|ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}. 6457 6458 This control is only effective if android.control.aeMode or android.control.mode is set to 6459 OFF; otherwise the auto-exposure algorithm will override this value. 6460 </details> 6461 <hal_details> 6462 For more details about stalling, see 6463 android.scaler.availableStallDurations. 6464 </hal_details> 6465 <tag id="V1" /> 6466 </entry> 6467 <entry name="sensitivity" type="int32" visibility="public" hwlevel="full"> 6468 <description>The amount of gain applied to sensor data 6469 before processing.</description> 6470 <units>ISO arithmetic units</units> 6471 <range>android.sensor.info.sensitivityRange</range> 6472 <details> 6473 The sensitivity is the standard ISO sensitivity value, 6474 as defined in ISO 12232:2006. 6475 6476 The sensitivity must be within android.sensor.info.sensitivityRange, and 6477 if it is less than android.sensor.maxAnalogSensitivity, the camera device 6478 is guaranteed to use only analog amplification for applying the gain. 6479 6480 If the camera device cannot apply the exact sensitivity 6481 requested, it will reduce the gain to the nearest supported 6482 value. The final sensitivity used will be available in the 6483 output capture result. 6484 6485 This control is only effective if android.control.aeMode or android.control.mode is set to 6486 OFF; otherwise the auto-exposure algorithm will override this value.
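As a rough illustration only, an application might clamp its requested ISO value to android.sensor.info.sensitivityRange and check whether the result stays within the analog-only range; `characteristics` and `requestBuilder` below are assumed to be in scope:

    // Illustrative sketch: clamp a requested ISO value and test for analog-only gain.
    int requestedIso = 400;  // stand-in value
    Integer iso = characteristics.get(
        CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE).clamp(requestedIso);
    Integer maxAnalog = characteristics.get(
        CameraCharacteristics.SENSOR_MAX_ANALOG_SENSITIVITY);
    boolean analogOnly = false;
    if (maxAnalog != null) {
      analogOnly = (iso &lt;= maxAnalog);  // gain is guaranteed to be purely analog
    }
    requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, iso);  // only honored when AE is OFF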
6487 </details> 6488 <hal_details>ISO 12232:2006 REI method is acceptable.</hal_details> 6489 <tag id="V1" /> 6490 </entry> 6491 </controls> 6492 <static> 6493 <namespace name="info"> 6494 <entry name="activeArraySize" type="int32" visibility="public" 6495 type_notes="Four ints defining the active pixel rectangle" 6496 container="array" typedef="rectangle" hwlevel="legacy"> 6497 <array> 6498 <size>4</size> 6499 </array> 6500 <description> 6501 The area of the image sensor which corresponds to active pixels after any geometric 6502 distortion correction has been applied. 6503 </description> 6504 <units>Pixel coordinates on the image sensor</units> 6505 <details> 6506 This is the rectangle representing the size of the active region of the sensor (i.e. 6507 the region that actually receives light from the scene) after any geometric correction 6508 has been applied, and should be treated as the maximum size in pixels of any of the 6509 image output formats aside from the raw formats. 6510 6511 This rectangle is defined relative to the full pixel array; (0,0) is the top-left of 6512 the full pixel array, and the size of the full pixel array is given by 6513 android.sensor.info.pixelArraySize. 6514 6515 The coordinate system for most other keys that list pixel coordinates, including 6516 android.scaler.cropRegion, is defined relative to the active array rectangle given in 6517 this field, with `(0, 0)` being the top-left of this rectangle. 6518 6519 The active array may be smaller than the full pixel array, since the full array may 6520 include black calibration pixels or other inactive regions, and geometric correction 6521 resulting in scaling or cropping may have been applied. 6522 </details> 6523 <ndk_details> 6524 The data representation is `int[4]`, which maps to `(left, top, width, height)`. 6525 </ndk_details> 6526 <hal_details> 6527 This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be 6528 &gt;= `(0,0)`. 6529 The `(width, height)` must be &lt;= `android.sensor.info.pixelArraySize`. 6530 </hal_details> 6531 <tag id="RAW" /> 6532 </entry> 6533 <entry name="sensitivityRange" type="int32" visibility="public" 6534 type_notes="Range of supported sensitivities" 6535 container="array" typedef="rangeInt" 6536 hwlevel="full"> 6537 <array> 6538 <size>2</size> 6539 </array> 6540 <description>Range of sensitivities for android.sensor.sensitivity supported by this 6541 camera device.</description> 6542 <range>Min <= 100, Max &gt;= 800</range> 6543 <details> 6544 The values are the standard ISO sensitivity values, 6545 as defined in ISO 12232:2006. 
6546 </details> 6547 6548 <tag id="BC" /> 6549 <tag id="V1" /> 6550 </entry> 6551 <entry name="colorFilterArrangement" type="byte" visibility="public" enum="true" 6552 hwlevel="full"> 6553 <enum> 6554 <value>RGGB</value> 6555 <value>GRBG</value> 6556 <value>GBRG</value> 6557 <value>BGGR</value> 6558 <value>RGB 6559 <notes>Sensor is not Bayer; output has 3 16-bit 6560 values for each pixel, instead of just 1 16-bit value 6561 per pixel.</notes></value> 6562 </enum> 6563 <description>The arrangement of color filters on sensor; 6564 represents the colors in the top-left 2x2 section of 6565 the sensor, in reading order.</description> 6566 <tag id="RAW" /> 6567 </entry> 6568 <entry name="exposureTimeRange" type="int64" visibility="public" 6569 type_notes="nanoseconds" container="array" typedef="rangeLong" 6570 hwlevel="full"> 6571 <array> 6572 <size>2</size> 6573 </array> 6574 <description>The range of image exposure times for android.sensor.exposureTime supported 6575 by this camera device. 6576 </description> 6577 <units>Nanoseconds</units> 6578 <range>The minimum exposure time will be less than 100 us. For FULL 6579 capability devices (android.info.supportedHardwareLevel == FULL), 6580 the maximum exposure time will be greater than 100ms.</range> 6581 <hal_details>For FULL capability devices (android.info.supportedHardwareLevel == FULL), 6582 The maximum of the range SHOULD be at least 1 second (1e9), MUST be at least 6583 100ms. 6584 </hal_details> 6585 <tag id="V1" /> 6586 </entry> 6587 <entry name="maxFrameDuration" type="int64" visibility="public" 6588 hwlevel="full"> 6589 <description>The maximum possible frame duration (minimum frame rate) for 6590 android.sensor.frameDuration that is supported this camera device.</description> 6591 <units>Nanoseconds</units> 6592 <range>For FULL capability devices 6593 (android.info.supportedHardwareLevel == FULL), at least 100ms. 6594 </range> 6595 <details>Attempting to use frame durations beyond the maximum will result in the frame 6596 duration being clipped to the maximum. See that control for a full definition of frame 6597 durations. 6598 6599 Refer to {@link 6600 android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS} 6601 for the minimum frame duration values. 6602 </details> 6603 <hal_details> 6604 For FULL capability devices (android.info.supportedHardwareLevel == FULL), 6605 The maximum of the range SHOULD be at least 6606 1 second (1e9), MUST be at least 100ms (100e6). 6607 6608 android.sensor.info.maxFrameDuration must be greater or 6609 equal to the android.sensor.info.exposureTimeRange max 6610 value (since exposure time overrides frame duration). 6611 6612 Available minimum frame durations for JPEG must be no greater 6613 than that of the YUV_420_888/IMPLEMENTATION_DEFINED 6614 minimum frame durations (for that respective size). 6615 6616 Since JPEG processing is considered offline and can take longer than 6617 a single uncompressed capture, refer to 6618 android.scaler.availableStallDurations 6619 for details about encoding this scenario. 
6620 </hal_details> 6621 <tag id="V1" /> 6622 </entry> 6623 <entry name="physicalSize" type="float" visibility="public" 6624 type_notes="width x height" 6625 container="array" typedef="sizeF" hwlevel="legacy"> 6626 <array> 6627 <size>2</size> 6628 </array> 6629 <description>The physical dimensions of the full pixel 6630 array.</description> 6631 <units>Millimeters</units> 6632 <details>This is the physical size of the sensor pixel 6633 array defined by android.sensor.info.pixelArraySize. 6634 </details> 6635 <hal_details>Needed for FOV calculation for old API</hal_details> 6636 <tag id="V1" /> 6637 <tag id="BC" /> 6638 </entry> 6639 <entry name="pixelArraySize" type="int32" visibility="public" 6640 container="array" typedef="size" hwlevel="legacy"> 6641 <array> 6642 <size>2</size> 6643 </array> 6644 <description>Dimensions of the full pixel array, possibly 6645 including black calibration pixels.</description> 6646 <units>Pixels</units> 6647 <details>The pixel count of the full pixel array of the image sensor, which covers 6648 android.sensor.info.physicalSize area. This represents the full pixel dimensions of 6649 the raw buffers produced by this sensor. 6650 6651 If a camera device supports raw sensor formats, either this or 6652 android.sensor.info.preCorrectionActiveArraySize is the maximum dimensions for the raw 6653 output formats listed in {@link 6654 android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS} 6655 (this depends on whether or not the image sensor returns buffers containing pixels that 6656 are not part of the active array region for blacklevel calibration or other purposes). 6657 6658 Some parts of the full pixel array may not receive light from the scene, 6659 or be otherwise inactive. The android.sensor.info.preCorrectionActiveArraySize key 6660 defines the rectangle of active pixels that will be included in processed image 6661 formats. 6662 </details> 6663 <tag id="RAW" /> 6664 <tag id="BC" /> 6665 </entry> 6666 <entry name="whiteLevel" type="int32" visibility="public"> 6667 <description> 6668 Maximum raw value output by sensor. 6669 </description> 6670 <range>&gt; 255 (8-bit output)</range> 6671 <details> 6672 This specifies the fully-saturated encoding level for the raw 6673 sample values from the sensor. This is typically caused by the 6674 sensor becoming highly non-linear or clipping. The minimum for 6675 each channel is specified by the offset in the 6676 android.sensor.blackLevelPattern key. 6677 6678 The white level is typically determined either by sensor bit depth 6679 (8-14 bits is expected), or by the point where the sensor response 6680 becomes too non-linear to be useful. The default value for this is 6681 maximum representable value for a 16-bit raw sample (2^16 - 1). 6682 6683 The white level values of captured images may vary for different 6684 capture settings (e.g., android.sensor.sensitivity). This key 6685 represents a coarse approximation for such case. It is recommended 6686 to use android.sensor.dynamicWhiteLevel for captures when supported 6687 by the camera device, which provides more accurate white level values. 6688 </details> 6689 <hal_details> 6690 The full bit depth of the sensor must be available in the raw data, 6691 so the value for linear sensors should not be significantly lower 6692 than maximum raw value supported, i.e. 2^(sensor bits per pixel). 
6693 </hal_details> 6694 <tag id="RAW" /> 6695 </entry> 6696 <entry name="timestampSource" type="byte" visibility="public" 6697 enum="true" hwlevel="legacy"> 6698 <enum> 6699 <value>UNKNOWN 6700 <notes> 6701 Timestamps from android.sensor.timestamp are in nanoseconds and monotonic, 6702 but cannot be compared to timestamps from other subsystems 6703 (e.g. accelerometer, gyro etc.), or other instances of the same or different 6704 camera devices in the same system. Timestamps between streams and results for 6705 a single camera instance are comparable, and the timestamps for all buffers 6706 and the result metadata generated by a single capture are identical. 6707 </notes> 6708 </value> 6709 <value>REALTIME 6710 <notes> 6711 Timestamps from android.sensor.timestamp are in the same timebase as 6712 {@link android.os.SystemClock#elapsedRealtimeNanos}, 6713 and they can be compared to other timestamps using that base. 6714 </notes> 6715 </value> 6716 </enum> 6717 <description>The time base source for sensor capture start timestamps.</description> 6718 <details> 6719 The timestamps provided for captures are always in nanoseconds and monotonic, but 6720 may not be based on a time source that can be compared to other system time sources. 6721 6722 This characteristic defines the source for the timestamps, and therefore whether they 6723 can be compared against other system time sources/timestamps. 6724 </details> 6725 <hal_details> 6726 For camera devices that implement UNKNOWN, the camera framework expects the timestamp 6727 source to be SYSTEM_TIME_MONOTONIC. For camera devices that implement REALTIME, the camera 6728 framework expects the timestamp source to be SYSTEM_TIME_BOOTTIME. See 6729 system/core/include/utils/Timers.h for the definition of SYSTEM_TIME_MONOTONIC and 6730 SYSTEM_TIME_BOOTTIME. Note that the HAL must follow the above expectation; otherwise video 6731 recording might suffer unexpected behavior. 6732 6733 Also, camera devices which implement REALTIME must pass the ITS sensor fusion test, which 6734 tests the alignment between camera timestamps and gyro sensor timestamps. 6735 </hal_details> 6736 <tag id="V1" /> 6737 </entry> 6738 <entry name="lensShadingApplied" type="byte" visibility="public" enum="true" 6739 typedef="boolean"> 6740 <enum> 6741 <value>FALSE</value> 6742 <value>TRUE</value> 6743 </enum> 6744 <description>Whether the RAW images output from this camera device are subject to 6745 lens shading correction.</description> 6746 <details> 6747 If TRUE, all images produced by the camera device in the RAW image formats will 6748 have lens shading correction already applied to them. If FALSE, the images will 6749 not be adjusted for lens shading correction. 6750 See android.request.maxNumOutputRaw for a list of RAW image formats. 6751 6752 This key will be `null` for all devices that do not report this information. 6753 Devices with RAW capability will always report this information in this key. 6754 </details> 6755 </entry> 6756 <entry name="preCorrectionActiveArraySize" type="int32" visibility="public" 6757 type_notes="Four ints defining the active pixel rectangle" container="array" 6758 typedef="rectangle" hwlevel="legacy"> 6759 <array> 6760 <size>4</size> 6761 </array> 6762 <description> 6763 The area of the image sensor which corresponds to active pixels prior to the 6764 application of any geometric distortion correction.
6765 </description> 6766 <units>Pixel coordinates on the image sensor</units> 6767 <details> 6768 This is the rectangle representing the size of the active region of the sensor (i.e. 6769 the region that actually receives light from the scene) before any geometric correction 6770 has been applied, and should be treated as the active region rectangle for any of the 6771 raw formats. All metadata associated with raw processing (e.g. the lens shading 6772 correction map, and radial distortion fields) treats the top, left of this rectangle as 6773 the origin, (0,0). 6774 6775 The size of this region determines the maximum field of view and the maximum number of 6776 pixels that an image from this sensor can contain, prior to the application of 6777 geometric distortion correction. The effective maximum pixel dimensions of a 6778 post-distortion-corrected image is given by the android.sensor.info.activeArraySize 6779 field, and the effective maximum field of view for a post-distortion-corrected image 6780 can be calculated by applying the geometric distortion correction fields to this 6781 rectangle, and cropping to the rectangle given in android.sensor.info.activeArraySize. 6782 6783 E.g. to calculate position of a pixel, (x,y), in a processed YUV output image with the 6784 dimensions in android.sensor.info.activeArraySize given the position of a pixel, 6785 (x', y'), in the raw pixel array with dimensions give in 6786 android.sensor.info.pixelArraySize: 6787 6788 1. Choose a pixel (x', y') within the active array region of the raw buffer given in 6789 android.sensor.info.preCorrectionActiveArraySize, otherwise this pixel is considered 6790 to be outside of the FOV, and will not be shown in the processed output image. 6791 1. Apply geometric distortion correction to get the post-distortion pixel coordinate, 6792 (x_i, y_i). When applying geometric correction metadata, note that metadata for raw 6793 buffers is defined relative to the top, left of the 6794 android.sensor.info.preCorrectionActiveArraySize rectangle. 6795 1. If the resulting corrected pixel coordinate is within the region given in 6796 android.sensor.info.activeArraySize, then the position of this pixel in the 6797 processed output image buffer is `(x_i - activeArray.left, y_i - activeArray.top)`, 6798 when the top, left coordinate of that buffer is treated as (0, 0). 6799 6800 Thus, for pixel x',y' = (25, 25) on a sensor where android.sensor.info.pixelArraySize 6801 is (100,100), android.sensor.info.preCorrectionActiveArraySize is (10, 10, 100, 100), 6802 android.sensor.info.activeArraySize is (20, 20, 80, 80), and the geometric distortion 6803 correction doesn't change the pixel coordinate, the resulting pixel selected in 6804 pixel coordinates would be x,y = (25, 25) relative to the top,left of the raw buffer 6805 with dimensions given in android.sensor.info.pixelArraySize, and would be (5, 5) 6806 relative to the top,left of post-processed YUV output buffer with dimensions given in 6807 android.sensor.info.activeArraySize. 6808 6809 The currently supported fields that correct for geometric distortion are: 6810 6811 1. android.lens.distortion. 6812 6813 If all of the geometric distortion fields are no-ops, this rectangle will be the same 6814 as the post-distortion-corrected rectangle given in 6815 android.sensor.info.activeArraySize. 
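As a rough sketch of the three-step mapping above: `distortionCorrect()` is a placeholder for an application-supplied implementation of the android.lens.distortion model (it is not a framework API), `rawX`/`rawY` are assumed in-scope pixel coordinates, and the corrected coordinate is assumed to be expressed in full-pixel-array coordinates, as in the worked example above.

    // Illustrative sketch of steps 2 and 3 of the procedure described above.
    Rect activeRect = characteristics.get(
        CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    PointF p = distortionCorrect(rawX, rawY);  // step 2: hypothetical correction helper
    if (activeRect.contains(Math.round(p.x), Math.round(p.y))) {  // step 3: inside the FOV?
      float outX = p.x - activeRect.left;  // position in the processed output buffer
      float outY = p.y - activeRect.top;
    }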
6816 6817 This rectangle is defined relative to the full pixel array; (0,0) is the top-left of 6818 the full pixel array, and the size of the full pixel array is given by 6819 android.sensor.info.pixelArraySize. 6820 6821 The pre-correction active array may be smaller than the full pixel array, since the 6822 full array may include black calibration pixels or other inactive regions. 6823 </details> 6824 <ndk_details> 6825 The data representation is `int[4]`, which maps to `(left, top, width, height)`. 6826 </ndk_details> 6827 <hal_details> 6828 This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be 6829 &gt;= `(0,0)`. 6830 The `(width, height)` must be &lt;= `android.sensor.info.pixelArraySize`. 6831 6832 If omitted by the HAL implementation, the camera framework will assume that this is 6833 the same as the post-correction active array region given in 6834 android.sensor.info.activeArraySize. 6835 </hal_details> 6836 <tag id="RAW" /> 6837 </entry> 6838 </namespace> 6839 <entry name="referenceIlluminant1" type="byte" visibility="public" 6840 enum="true"> 6841 <enum> 6842 <value id="1">DAYLIGHT</value> 6843 <value id="2">FLUORESCENT</value> 6844 <value id="3">TUNGSTEN 6845 <notes>Incandescent light</notes> 6846 </value> 6847 <value id="4">FLASH</value> 6848 <value id="9">FINE_WEATHER</value> 6849 <value id="10">CLOUDY_WEATHER</value> 6850 <value id="11">SHADE</value> 6851 <value id="12">DAYLIGHT_FLUORESCENT 6852 <notes>D 5700 - 7100K</notes> 6853 </value> 6854 <value id="13">DAY_WHITE_FLUORESCENT 6855 <notes>N 4600 - 5400K</notes> 6856 </value> 6857 <value id="14">COOL_WHITE_FLUORESCENT 6858 <notes>W 3900 - 4500K</notes> 6859 </value> 6860 <value id="15">WHITE_FLUORESCENT 6861 <notes>WW 3200 - 3700K</notes> 6862 </value> 6863 <value id="17">STANDARD_A</value> 6864 <value id="18">STANDARD_B</value> 6865 <value id="19">STANDARD_C</value> 6866 <value id="20">D55</value> 6867 <value id="21">D65</value> 6868 <value id="22">D75</value> 6869 <value id="23">D50</value> 6870 <value id="24">ISO_STUDIO_TUNGSTEN</value> 6871 </enum> 6872 <description> 6873 The standard reference illuminant used as the scene light source when 6874 calculating the android.sensor.colorTransform1, 6875 android.sensor.calibrationTransform1, and 6876 android.sensor.forwardMatrix1 matrices. 6877 </description> 6878 <details> 6879 The values in this key correspond to the values defined for the 6880 EXIF LightSource tag. These illuminants are standard light sources 6881 that are often used calibrating camera devices. 6882 6883 If this key is present, then android.sensor.colorTransform1, 6884 android.sensor.calibrationTransform1, and 6885 android.sensor.forwardMatrix1 will also be present. 6886 6887 Some devices may choose to provide a second set of calibration 6888 information for improved quality, including 6889 android.sensor.referenceIlluminant2 and its corresponding matrices. 6890 </details> 6891 <hal_details> 6892 The first reference illuminant (android.sensor.referenceIlluminant1) 6893 and corresponding matrices must be present to support the RAW capability 6894 and DNG output. 6895 6896 When producing raw images with a color profile that has only been 6897 calibrated against a single light source, it is valid to omit 6898 android.sensor.referenceIlluminant2 along with the 6899 android.sensor.colorTransform2, android.sensor.calibrationTransform2, 6900 and android.sensor.forwardMatrix2 matrices. 
6901 6902 If only android.sensor.referenceIlluminant1 is included, it should be 6903 chosen so that it is representative of typical scene lighting. In 6904 general, D50 or DAYLIGHT will be chosen for this case. 6905 6906 If both android.sensor.referenceIlluminant1 and 6907 android.sensor.referenceIlluminant2 are included, they should be 6908 chosen to represent the typical range of scene lighting conditions. 6909 In general, low color temperature illuminant such as Standard-A will 6910 be chosen for the first reference illuminant and a higher color 6911 temperature illuminant such as D65 will be chosen for the second 6912 reference illuminant. 6913 </hal_details> 6914 <tag id="RAW" /> 6915 </entry> 6916 <entry name="referenceIlluminant2" type="byte" visibility="public"> 6917 <description> 6918 The standard reference illuminant used as the scene light source when 6919 calculating the android.sensor.colorTransform2, 6920 android.sensor.calibrationTransform2, and 6921 android.sensor.forwardMatrix2 matrices. 6922 </description> 6923 <range>Any value listed in android.sensor.referenceIlluminant1</range> 6924 <details> 6925 See android.sensor.referenceIlluminant1 for more details. 6926 6927 If this key is present, then android.sensor.colorTransform2, 6928 android.sensor.calibrationTransform2, and 6929 android.sensor.forwardMatrix2 will also be present. 6930 </details> 6931 <tag id="RAW" /> 6932 </entry> 6933 <entry name="calibrationTransform1" type="rational" 6934 visibility="public" optional="true" 6935 type_notes="3x3 matrix in row-major-order" container="array" 6936 typedef="colorSpaceTransform"> 6937 <array> 6938 <size>3</size> 6939 <size>3</size> 6940 </array> 6941 <description> 6942 A per-device calibration transform matrix that maps from the 6943 reference sensor colorspace to the actual device sensor colorspace. 6944 </description> 6945 <details> 6946 This matrix is used to correct for per-device variations in the 6947 sensor colorspace, and is used for processing raw buffer data. 6948 6949 The matrix is expressed as a 3x3 matrix in row-major-order, and 6950 contains a per-device calibration transform that maps colors 6951 from reference sensor color space (i.e. the "golden module" 6952 colorspace) into this camera device's native sensor color 6953 space under the first reference illuminant 6954 (android.sensor.referenceIlluminant1). 6955 </details> 6956 <tag id="RAW" /> 6957 </entry> 6958 <entry name="calibrationTransform2" type="rational" 6959 visibility="public" optional="true" 6960 type_notes="3x3 matrix in row-major-order" container="array" 6961 typedef="colorSpaceTransform"> 6962 <array> 6963 <size>3</size> 6964 <size>3</size> 6965 </array> 6966 <description> 6967 A per-device calibration transform matrix that maps from the 6968 reference sensor colorspace to the actual device sensor colorspace 6969 (this is the colorspace of the raw buffer data). 6970 </description> 6971 <details> 6972 This matrix is used to correct for per-device variations in the 6973 sensor colorspace, and is used for processing raw buffer data. 6974 6975 The matrix is expressed as a 3x3 matrix in row-major-order, and 6976 contains a per-device calibration transform that maps colors 6977 from reference sensor color space (i.e. the "golden module" 6978 colorspace) into this camera device's native sensor color 6979 space under the second reference illuminant 6980 (android.sensor.referenceIlluminant2). 6981 6982 This matrix will only be present if the second reference 6983 illuminant is present. 
6984 </details> 6985 <tag id="RAW" /> 6986 </entry> 6987 <entry name="colorTransform1" type="rational" 6988 visibility="public" optional="true" 6989 type_notes="3x3 matrix in row-major-order" container="array" 6990 typedef="colorSpaceTransform"> 6991 <array> 6992 <size>3</size> 6993 <size>3</size> 6994 </array> 6995 <description> 6996 A matrix that transforms color values from CIE XYZ color space to 6997 reference sensor color space. 6998 </description> 6999 <details> 7000 This matrix is used to convert from the standard CIE XYZ color 7001 space to the reference sensor colorspace, and is used when processing 7002 raw buffer data. 7003 7004 The matrix is expressed as a 3x3 matrix in row-major-order, and 7005 contains a color transform matrix that maps colors from the CIE 7006 XYZ color space to the reference sensor color space (i.e. the 7007 "golden module" colorspace) under the first reference illuminant 7008 (android.sensor.referenceIlluminant1). 7009 7010 The white points chosen in both the reference sensor color space 7011 and the CIE XYZ colorspace when calculating this transform will 7012 match the standard white point for the first reference illuminant 7013 (i.e. no chromatic adaptation will be applied by this transform). 7014 </details> 7015 <tag id="RAW" /> 7016 </entry> 7017 <entry name="colorTransform2" type="rational" 7018 visibility="public" optional="true" 7019 type_notes="3x3 matrix in row-major-order" container="array" 7020 typedef="colorSpaceTransform"> 7021 <array> 7022 <size>3</size> 7023 <size>3</size> 7024 </array> 7025 <description> 7026 A matrix that transforms color values from CIE XYZ color space to 7027 reference sensor color space. 7028 </description> 7029 <details> 7030 This matrix is used to convert from the standard CIE XYZ color 7031 space to the reference sensor colorspace, and is used when processing 7032 raw buffer data. 7033 7034 The matrix is expressed as a 3x3 matrix in row-major-order, and 7035 contains a color transform matrix that maps colors from the CIE 7036 XYZ color space to the reference sensor color space (i.e. the 7037 "golden module" colorspace) under the second reference illuminant 7038 (android.sensor.referenceIlluminant2). 7039 7040 The white points chosen in both the reference sensor color space 7041 and the CIE XYZ colorspace when calculating this transform will 7042 match the standard white point for the second reference illuminant 7043 (i.e. no chromatic adaptation will be applied by this transform). 7044 7045 This matrix will only be present if the second reference 7046 illuminant is present. 7047 </details> 7048 <tag id="RAW" /> 7049 </entry> 7050 <entry name="forwardMatrix1" type="rational" 7051 visibility="public" optional="true" 7052 type_notes="3x3 matrix in row-major-order" container="array" 7053 typedef="colorSpaceTransform"> 7054 <array> 7055 <size>3</size> 7056 <size>3</size> 7057 </array> 7058 <description> 7059 A matrix that transforms white balanced camera colors from the reference 7060 sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint. 7061 </description> 7062 <details> 7063 This matrix is used to convert to the standard CIE XYZ colorspace, and 7064 is used when processing raw buffer data. 7065 7066 This matrix is expressed as a 3x3 matrix in row-major-order, and contains 7067 a color transform matrix that maps white balanced colors from the 7068 reference sensor color space to the CIE XYZ color space with a D50 white 7069 point. 
7070 7071 Under the first reference illuminant (android.sensor.referenceIlluminant1) 7072 this matrix is chosen so that the standard white point for this reference 7073 illuminant in the reference sensor colorspace is mapped to D50 in the 7074 CIE XYZ colorspace. 7075 </details> 7076 <tag id="RAW" /> 7077 </entry> 7078 <entry name="forwardMatrix2" type="rational" 7079 visibility="public" optional="true" 7080 type_notes="3x3 matrix in row-major-order" container="array" 7081 typedef="colorSpaceTransform"> 7082 <array> 7083 <size>3</size> 7084 <size>3</size> 7085 </array> 7086 <description> 7087 A matrix that transforms white balanced camera colors from the reference 7088 sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint. 7089 </description> 7090 <details> 7091 This matrix is used to convert to the standard CIE XYZ colorspace, and 7092 is used when processing raw buffer data. 7093 7094 This matrix is expressed as a 3x3 matrix in row-major-order, and contains 7095 a color transform matrix that maps white balanced colors from the 7096 reference sensor color space to the CIE XYZ color space with a D50 white 7097 point. 7098 7099 Under the second reference illuminant (android.sensor.referenceIlluminant2) 7100 this matrix is chosen so that the standard white point for this reference 7101 illuminant in the reference sensor colorspace is mapped to D50 in the 7102 CIE XYZ colorspace. 7103 7104 This matrix will only be present if the second reference 7105 illuminant is present. 7106 </details> 7107 <tag id="RAW" /> 7108 </entry> 7109 <entry name="baseGainFactor" type="rational" 7110 optional="true"> 7111 <description>Gain factor from electrons to raw units when 7112 ISO=100</description> 7113 <tag id="FUTURE" /> 7114 </entry> 7115 <entry name="blackLevelPattern" type="int32" visibility="public" 7116 optional="true" type_notes="2x2 raw count block" container="array" 7117 typedef="blackLevelPattern"> 7118 <array> 7119 <size>4</size> 7120 </array> 7121 <description> 7122 A fixed black level offset for each of the color filter arrangement 7123 (CFA) mosaic channels. 7124 </description> 7125 <range>&gt;= 0 for each.</range> 7126 <details> 7127 This key specifies the zero light value for each of the CFA mosaic 7128 channels in the camera sensor. The maximal value output by the 7129 sensor is represented by the value in android.sensor.info.whiteLevel. 7130 7131 The values are given in the same order as channels listed for the CFA 7132 layout key (see android.sensor.info.colorFilterArrangement), i.e. the 7133 nth value given corresponds to the black level offset for the nth 7134 color channel listed in the CFA. 7135 7136 The black level values of captured images may vary for different 7137 capture settings (e.g., android.sensor.sensitivity). This key 7138 represents a coarse approximation for such case. It is recommended to 7139 use android.sensor.dynamicBlackLevel or use pixels from 7140 android.sensor.opticalBlackRegions directly for captures when 7141 supported by the camera device, which provides more accurate black 7142 level values. For raw capture in particular, it is recommended to use 7143 pixels from android.sensor.opticalBlackRegions to calculate black 7144 level values for each frame. 7145 </details> 7146 <hal_details> 7147 The values are given in row-column scan order, with the first value 7148 corresponding to the element of the CFA in row=0, column=0. 
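As a rough, application-level illustration of how these offsets are typically consumed (and of the ordering described above), a raw sample can be normalized against the static black and white levels; `characteristics`, the pixel coordinates `(x, y)`, and the 16-bit sample `raw` are assumed to be in scope, and any CFA phase offset of the active array within the full pixel array is ignored:

    // Illustrative sketch: normalize one raw sample using the static black/white levels.
    BlackLevelPattern pattern = characteristics.get(
        CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN);
    int whiteLevel = characteristics.get(CameraCharacteristics.SENSOR_INFO_WHITE_LEVEL);
    int black = pattern.getOffsetForIndex(x % 2, y % 2);  // (column, row) within the 2x2 block
    float normalized = (raw - black) / (float) (whiteLevel - black);  // roughly in [0, 1]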
7149 </hal_details> 7150 <tag id="RAW" /> 7151 </entry> 7152 <entry name="maxAnalogSensitivity" type="int32" visibility="public" 7153 optional="true" hwlevel="full"> 7154 <description>Maximum sensitivity that is implemented 7155 purely through analog gain.</description> 7156 <details>For android.sensor.sensitivity values less than or 7157 equal to this, all applied gain must be analog. For 7158 values above this, the gain applied can be a mix of analog and 7159 digital.</details> 7160 <tag id="V1" /> 7161 <tag id="FULL" /> 7162 </entry> 7163 <entry name="orientation" type="int32" visibility="public" 7164 hwlevel="legacy"> 7165 <description>Clockwise angle through which the output image needs to be rotated to be 7166 upright on the device screen in its native orientation. 7167 </description> 7168 <units>Degrees of clockwise rotation; always a multiple of 7169 90</units> 7170 <range>0, 90, 180, 270</range> 7171 <details> 7172 Also defines the direction of rolling shutter readout, which is from top to bottom in 7173 the sensor's coordinate system. 7174 </details> 7175 <tag id="BC" /> 7176 </entry> 7177 <entry name="profileHueSatMapDimensions" type="int32" 7178 visibility="system" optional="true" 7179 type_notes="Number of samples for hue, saturation, and value" 7180 container="array"> 7181 <array> 7182 <size>3</size> 7183 </array> 7184 <description> 7185 The number of input samples for each dimension of 7186 android.sensor.profileHueSatMap. 7187 </description> 7188 <range> 7189 Hue &gt;= 1, 7190 Saturation &gt;= 2, 7191 Value &gt;= 1 7192 </range> 7193 <details> 7194 The number of input samples for the hue, saturation, and value 7195 dimension of android.sensor.profileHueSatMap. The order of the 7196 dimensions given is hue, saturation, value; where hue is the 0th 7197 element. 7198 </details> 7199 <tag id="RAW" /> 7200 </entry> 7201 </static> 7202 <dynamic> 7203 <clone entry="android.sensor.exposureTime" kind="controls"> 7204 </clone> 7205 <clone entry="android.sensor.frameDuration" 7206 kind="controls"></clone> 7207 <clone entry="android.sensor.sensitivity" kind="controls"> 7208 </clone> 7209 <entry name="timestamp" type="int64" visibility="public" 7210 hwlevel="legacy"> 7211 <description>Time at start of exposure of first 7212 row of the image sensor active array, in nanoseconds.</description> 7213 <units>Nanoseconds</units> 7214 <range>&gt; 0</range> 7215 <details>The timestamps are also included in all image 7216 buffers produced for the same capture, and will be identical 7217 on all the outputs. 7218 7219 When android.sensor.info.timestampSource `==` UNKNOWN, 7220 the timestamps measure time since an unspecified starting point, 7221 and are monotonically increasing. They can be compared with the 7222 timestamps for other captures from the same camera device, but are 7223 not guaranteed to be comparable to any other time source. 7224 7225 When android.sensor.info.timestampSource `==` REALTIME, the 7226 timestamps measure time in the same timebase as {@link 7227 android.os.SystemClock#elapsedRealtimeNanos}, and they can 7228 be compared to other timestamps from other subsystems that 7229 are using that base. 7230 7231 For reprocessing, the timestamp will match the start of exposure of 7232 the input image, i.e. {@link CaptureResult#SENSOR_TIMESTAMP the 7233 timestamp} in the TotalCaptureResult that was used to create the 7234 reprocess capture request. 
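As a rough illustration only, a capture timestamp can be compared against the current clock when the timestamp source is REALTIME; the sketch assumes it runs inside CameraCaptureSession.CaptureCallback#onCaptureCompleted with `result` and `characteristics` in scope:

    // Illustrative sketch: measure the time from start of exposure until "now".
    long timestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
    Integer source = characteristics.get(CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
    if (source != null) {
      if (source == CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) {
        // Only meaningful for REALTIME sources, which share the elapsedRealtimeNanos timebase.
        long latencyNs = SystemClock.elapsedRealtimeNanos() - timestamp;
      }
    }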
7235 </details> 7236 <hal_details> 7237 All timestamps must be in reference to the kernel's 7238 CLOCK_BOOTTIME monotonic clock, which properly accounts for 7239 time spent asleep. This allows for synchronization with 7240 sensors that continue to operate while the system is 7241 otherwise asleep. 7242 7243 If android.sensor.info.timestampSource `==` REALTIME, 7244 The timestamp must be synchronized with the timestamps from other 7245 sensor subsystems that are using the same timebase. 7246 7247 For reprocessing, the input image's start of exposure can be looked up 7248 with android.sensor.timestamp from the metadata included in the 7249 capture request. 7250 </hal_details> 7251 <tag id="BC" /> 7252 </entry> 7253 <entry name="temperature" type="float" 7254 optional="true"> 7255 <description>The temperature of the sensor, sampled at the time 7256 exposure began for this frame. 7257 7258 The thermal diode being queried should be inside the sensor PCB, or 7259 somewhere close to it. 7260 </description> 7261 7262 <units>Celsius</units> 7263 <range>Optional. This value is missing if no temperature is available.</range> 7264 <tag id="FUTURE" /> 7265 </entry> 7266 <entry name="neutralColorPoint" type="rational" visibility="public" 7267 optional="true" container="array"> 7268 <array> 7269 <size>3</size> 7270 </array> 7271 <description> 7272 The estimated camera neutral color in the native sensor colorspace at 7273 the time of capture. 7274 </description> 7275 <details> 7276 This value gives the neutral color point encoded as an RGB value in the 7277 native sensor color space. The neutral color point indicates the 7278 currently estimated white point of the scene illumination. It can be 7279 used to interpolate between the provided color transforms when 7280 processing raw sensor data. 7281 7282 The order of the values is R, G, B; where R is in the lowest index. 7283 </details> 7284 <tag id="RAW" /> 7285 </entry> 7286 <entry name="noiseProfile" type="double" visibility="public" 7287 optional="true" type_notes="Pairs of noise model coefficients" 7288 container="array" typedef="pairDoubleDouble"> 7289 <array> 7290 <size>2</size> 7291 <size>CFA Channels</size> 7292 </array> 7293 <description> 7294 Noise model coefficients for each CFA mosaic channel. 7295 </description> 7296 <details> 7297 This key contains two noise model coefficients for each CFA channel 7298 corresponding to the sensor amplification (S) and sensor readout 7299 noise (O). These are given as pairs of coefficients for each channel 7300 in the same order as channels listed for the CFA layout key 7301 (see android.sensor.info.colorFilterArrangement). This is 7302 represented as an array of Pair&lt;Double, Double&gt;, where 7303 the first member of the Pair at index n is the S coefficient and the 7304 second member is the O coefficient for the nth color channel in the CFA. 7305 7306 These coefficients are used in a two parameter noise model to describe 7307 the amount of noise present in the image for each CFA channel. The 7308 noise model used here is: 7309 7310 N(x) = sqrt(Sx + O) 7311 7312 Where x represents the recorded signal of a CFA channel normalized to 7313 the range [0, 1], and S and O are the noise model coeffiecients for 7314 that channel. 7315 7316 A more detailed description of the noise model can be found in the 7317 Adobe DNG specification for the NoiseProfile tag. 
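As a rough illustration, the model can be evaluated directly from the reported coefficient pairs; `result` below is assumed to be a TotalCaptureResult from a RAW-capable device:

    // Illustrative sketch: evaluate N(x) = sqrt(Sx + O) for the first CFA channel.
    Pair&lt;Double, Double&gt;[] profile = result.get(CaptureResult.SENSOR_NOISE_PROFILE);
    if (profile != null) {
      double x = 0.5;                       // normalized signal level in [0, 1]
      double s = profile[0].first;          // S coefficient for the first CFA channel
      double o = profile[0].second;         // O coefficient for the first CFA channel
      double sigma = Math.sqrt(s * x + o);  // estimated noise at that signal level
    }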
7318 </details> 7319 <hal_details> 7320 For a CFA layout of RGGB, the list of coefficients would be given as 7321 an array of doubles S0,O0,S1,O1,..., where S0 and O0 are the coefficients 7322 for the red channel, S1 and O1 are the coefficients for the first green 7323 channel, etc. 7324 </hal_details> 7325 <tag id="RAW" /> 7326 </entry> 7327 <entry name="profileHueSatMap" type="float" 7328 visibility="system" optional="true" 7329 type_notes="Mapping for hue, saturation, and value" 7330 container="array"> 7331 <array> 7332 <size>hue_samples</size> 7333 <size>saturation_samples</size> 7334 <size>value_samples</size> 7335 <size>3</size> 7336 </array> 7337 <description> 7338 A mapping containing a hue shift, saturation scale, and value scale 7339 for each pixel. 7340 </description> 7341 <units> 7342 The hue shift is given in degrees; saturation and value scale factors are 7343 unitless and are between 0 and 1 inclusive 7344 </units> 7345 <details> 7346 hue_samples, saturation_samples, and value_samples are given in 7347 android.sensor.profileHueSatMapDimensions. 7348 7349 Each entry of this map contains three floats corresponding to the 7350 hue shift, saturation scale, and value scale, respectively; where the 7351 hue shift has the lowest index. The map entries are stored in the key 7352 in nested loop order, with the value divisions in the outer loop, the 7353 hue divisions in the middle loop, and the saturation divisions in the 7354 inner loop. All zero input saturation entries are required to have a 7355 value scale factor of 1.0. 7356 </details> 7357 <tag id="RAW" /> 7358 </entry> 7359 <entry name="profileToneCurve" type="float" 7360 visibility="system" optional="true" 7361 type_notes="Samples defining a spline for a tone-mapping curve" 7362 container="array"> 7363 <array> 7364 <size>samples</size> 7365 <size>2</size> 7366 </array> 7367 <description> 7368 A list of x,y samples defining a tone-mapping curve for gamma adjustment. 7369 </description> 7370 <range> 7371 Each sample has an input range of `[0, 1]` and an output range of 7372 `[0, 1]`. The first sample is required to be `(0, 0)`, and the last 7373 sample is required to be `(1, 1)`. 7374 </range> 7375 <details> 7376 This key contains a default tone curve that can be applied while 7377 processing the image as a starting point for user adjustments. 7378 The curve is specified as a list of value pairs in linear gamma. 7379 The curve is interpolated using a cubic spline. 7380 </details> 7381 <tag id="RAW" /> 7382 </entry> 7383 <entry name="greenSplit" type="float" visibility="public" optional="true"> 7384 <description> 7385 The worst-case divergence between Bayer green channels. 7386 </description> 7387 <range> 7388 &gt;= 0 7389 </range> 7390 <details> 7391 This value is an estimate of the worst case split between the 7392 Bayer green channels in the red and blue rows in the sensor color 7393 filter array. 7394 7395 The green split is calculated as follows: 7396 7397 1. A 5x5 pixel (or larger) window W within the active sensor array is 7398 chosen. The term 'pixel' here is taken to mean a group of 4 Bayer 7399 mosaic channels (R, Gr, Gb, B). The location and size of the window 7400 chosen is implementation defined, and should be chosen to provide a 7401 green split estimate that is both representative of the entire image 7402 for this camera sensor, and can be calculated quickly. 7403 1. The arithmetic mean of the green channels from the red 7404 rows (mean_Gr) within W is computed. 7405 1. 
The arithmetic mean of the green channels from the blue 7406 rows (mean_Gb) within W is computed. 7407 1. The maximum ratio R of the two means is computed as follows: 7408 `R = max((mean_Gr + 1)/(mean_Gb + 1), (mean_Gb + 1)/(mean_Gr + 1))` 7409 7410 The ratio R is the green split divergence reported for this property, 7411 which represents how much the green channels differ in the mosaic 7412 pattern. This value is typically used to determine the treatment of 7413 the green mosaic channels when demosaicing. 7414 7415 The green split value can be roughly interpreted as follows: 7416 7417 * R &lt; 1.03 is a negligible split (&lt;3% divergence). 7418 * 1.03 &lt;= R &lt;= 1.20 will require some software 7419 correction to avoid demosaic errors (3-20% divergence). 7420 * R &gt; 1.20 will require strong software correction to produce 7421 a usable image (&gt;20% divergence). 7422 </details> 7423 <hal_details> 7424 The green split given may be a static value based on prior 7425 characterization of the camera sensor using the green split 7426 calculation method given here over a large, representative, sample 7427 set of images. Other methods of calculation that produce equivalent 7428 results, and can be interpreted in the same manner, may be used. 7429 </hal_details> 7430 <tag id="RAW" /> 7431 </entry> 7432 </dynamic> 7433 <controls> 7434 <entry name="testPatternData" type="int32" visibility="public" optional="true" container="array"> 7435 <array> 7436 <size>4</size> 7437 </array> 7438 <description> 7439 A pixel `[R, G_even, G_odd, B]` that supplies the test pattern 7440 when android.sensor.testPatternMode is SOLID_COLOR. 7441 </description> 7442 <details> 7443 Each color channel is treated as an unsigned 32-bit integer. 7444 The camera device then uses the most significant X bits 7445 that correspond to how many bits are in its Bayer raw sensor 7446 output. 7447 7448 For example, a sensor with RAW10 Bayer output would use the 7449 10 most significant bits from each color channel. 7450 </details> 7451 <hal_details> 7452 </hal_details> 7453 </entry> 7454 <entry name="testPatternMode" type="int32" visibility="public" optional="true" 7455 enum="true"> 7456 <enum> 7457 <value>OFF 7458 <notes>No test pattern mode is used, and the camera 7459 device returns captures from the image sensor. 7460 7461 This is the default if the key is not set.</notes> 7462 </value> 7463 <value>SOLID_COLOR 7464 <notes> 7465 Each pixel in `[R, G_even, G_odd, B]` is replaced by its 7466 respective color channel provided in 7467 android.sensor.testPatternData. 7468 7469 For example: 7470 7471 android.testPatternData = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0] 7472 7473 All green pixels are 100% green. All red/blue pixels are black. 7474 7475 android.testPatternData = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0] 7476 7477 All red pixels are 100% red. Only the odd green pixels 7478 are 100% green. All blue pixels are 100% black. 7479 </notes> 7480 </value> 7481 <value>COLOR_BARS 7482 <notes> 7483 All pixel data is replaced with an 8-bar color pattern. 7484 7485 The vertical bars (left-to-right) are as follows: 7486 7487 * 100% white 7488 * yellow 7489 * cyan 7490 * green 7491 * magenta 7492 * red 7493 * blue 7494 * black 7495 7496 In general the image would look like the following: 7497 7498 W Y C G M R B K 7499 W Y C G M R B K 7500 W Y C G M R B K 7501 W Y C G M R B K 7502 W Y C G M R B K 7503 . . . . . . . . 7504 . . . . . . . . 7505 . . . . . . . .
7506 7507 (B = Blue, K = Black) 7508 7509 Each bar should take up 1/8 of the sensor pixel array width. 7510 When this is not possible, the bar size should be rounded 7511 down to the nearest integer and the pattern can repeat 7512 on the right side. 7513 7514 Each bar's height must always take up the full sensor 7515 pixel array height. 7516 7517 Each pixel in this test pattern must be set to either 7518 0% intensity or 100% intensity. 7519 </notes> 7520 </value> 7521 <value>COLOR_BARS_FADE_TO_GRAY 7522 <notes> 7523 The test pattern is similar to COLOR_BARS, except that 7524 each bar should start at its specified color at the top, 7525 and fade to gray at the bottom. 7526 7527 Furthermore each bar is further subdivided into a left and 7528 right half. The left half should have a smooth gradient, 7529 and the right half should have a quantized gradient. 7530 7531 In particular, the right half's should consist of blocks of the 7532 same color for 1/16th active sensor pixel array width. 7533 7534 The least significant bits in the quantized gradient should 7535 be copied from the most significant bits of the smooth gradient. 7536 7537 The height of each bar should always be a multiple of 128. 7538 When this is not the case, the pattern should repeat at the bottom 7539 of the image. 7540 </notes> 7541 </value> 7542 <value>PN9 7543 <notes> 7544 All pixel data is replaced by a pseudo-random sequence 7545 generated from a PN9 512-bit sequence (typically implemented 7546 in hardware with a linear feedback shift register). 7547 7548 The generator should be reset at the beginning of each frame, 7549 and thus each subsequent raw frame with this test pattern should 7550 be exactly the same as the last. 7551 </notes> 7552 </value> 7553 <value id="256">CUSTOM1 7554 <notes>The first custom test pattern. All custom patterns that are 7555 available only on this camera device are at least this numeric 7556 value. 7557 7558 All of the custom test patterns will be static 7559 (that is the raw image must not vary from frame to frame). 7560 </notes> 7561 </value> 7562 </enum> 7563 <description>When enabled, the sensor sends a test pattern instead of 7564 doing a real exposure from the camera. 7565 </description> 7566 <range>android.sensor.availableTestPatternModes</range> 7567 <details> 7568 When a test pattern is enabled, all manual sensor controls specified 7569 by android.sensor.* will be ignored. All other controls should 7570 work as normal. 7571 7572 For example, if manual flash is enabled, flash firing should still 7573 occur (and that the test pattern remain unmodified, since the flash 7574 would not actually affect it). 7575 7576 Defaults to OFF. 7577 </details> 7578 <hal_details> 7579 All test patterns are specified in the Bayer domain. 7580 7581 The HAL may choose to substitute test patterns from the sensor 7582 with test patterns from on-device memory. In that case, it should be 7583 indistinguishable to the ISP whether the data came from the 7584 sensor interconnect bus (such as CSI2) or memory. 
7585 </hal_details> 7586 </entry> 7587 </controls> 7588 <dynamic> 7589 <clone entry="android.sensor.testPatternData" kind="controls"> 7590 </clone> 7591 <clone entry="android.sensor.testPatternMode" kind="controls"> 7592 </clone> 7593 </dynamic> 7594 <static> 7595 <entry name="availableTestPatternModes" type="int32" visibility="public" optional="true" 7596 type_notes="list of enums" container="array"> 7597 <array> 7598 <size>n</size> 7599 </array> 7600 <description>List of sensor test pattern modes for android.sensor.testPatternMode 7601 supported by this camera device. 7602 </description> 7603 <range>Any value listed in android.sensor.testPatternMode</range> 7604 <details> 7605 Defaults to OFF, and always includes OFF if defined. 7606 </details> 7607 <hal_details> 7608 All custom modes must be >= CUSTOM1. 7609 </hal_details> 7610 </entry> 7611 </static> 7612 <dynamic> 7613 <entry name="rollingShutterSkew" type="int64" visibility="public" hwlevel="limited"> 7614 <description>Duration between the start of first row exposure 7615 and the start of last row exposure.</description> 7616 <units>Nanoseconds</units> 7617 <range> &gt;= 0 and &lt; 7618 {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}.</range> 7619 <details> 7620 This is the exposure time skew between the first and last 7621 row exposure start times. The first row and the last row are 7622 the first and last rows inside of the 7623 android.sensor.info.activeArraySize. 7624 7625 For typical camera sensors that use rolling shutters, this is also equivalent 7626 to the frame readout time. 7627 </details> 7628 <hal_details> 7629 The HAL must report `0` if the sensor is using global shutter, where all pixels begin 7630 exposure at the same time. 7631 </hal_details> 7632 <tag id="V1" /> 7633 </entry> 7634 </dynamic> 7635 <static> 7636 <entry name="opticalBlackRegions" type="int32" visibility="public" optional="true" 7637 container="array" typedef="rectangle"> 7638 <array> 7639 <size>4</size> 7640 <size>num_regions</size> 7641 </array> 7642 <description>List of disjoint rectangles indicating the sensor 7643 optically shielded black pixel regions. 7644 </description> 7645 <details> 7646 In most camera sensors, the active array is surrounded by some 7647 optically shielded pixel areas. By blocking light, these pixels 7648 provides a reliable black reference for black level compensation 7649 in active array region. 7650 7651 This key provides a list of disjoint rectangles specifying the 7652 regions of optically shielded (with metal shield) black pixel 7653 regions if the camera device is capable of reading out these black 7654 pixels in the output raw images. In comparison to the fixed black 7655 level values reported by android.sensor.blackLevelPattern, this key 7656 may provide a more accurate way for the application to calculate 7657 black level of each captured raw images. 7658 7659 When this key is reported, the android.sensor.dynamicBlackLevel and 7660 android.sensor.dynamicWhiteLevel will also be reported. 7661 </details> 7662 <ndk_details> 7663 The data representation is `int[4]`, which maps to `(left, top, width, height)`. 7664 </ndk_details> 7665 <hal_details> 7666 This array contains (xmin, ymin, width, height). The (xmin, ymin) 7667 must be &gt;= (0,0) and &lt;= 7668 android.sensor.info.pixelArraySize. The (width, height) must be 7669 &lt;= android.sensor.info.pixelArraySize. Each region must be 7670 outside the region reported by 7671 android.sensor.info.preCorrectionActiveArraySize. 
7672 7673 The HAL must report the minimal number of disjoint regions for the 7674 optically shielded black pixel regions. For example, if a region can 7675 be covered by one rectangle, the HAL must not split this region into 7676 multiple rectangles. 7677 </hal_details> 7678 </entry> 7679 </static> 7680 <dynamic> 7681 <entry name="dynamicBlackLevel" type="float" visibility="public" 7682 optional="true" type_notes="2x2 raw count block" container="array"> 7683 <array> 7684 <size>4</size> 7685 </array> 7686 <description> 7687 A per-frame dynamic black level offset for each of the color filter 7688 arrangement (CFA) mosaic channels. 7689 </description> 7690 <range>&gt;= 0 for each.</range> 7691 <details> 7692 Camera sensor black levels may vary dramatically for different 7693 capture settings (e.g. android.sensor.sensitivity). The fixed black 7694 level reported by android.sensor.blackLevelPattern may be too 7695 inaccurate to represent the actual value on a per-frame basis. The 7696 camera device internal pipeline relies on reliable black level values 7697 to process the raw images appropriately. To get the best image 7698 quality, the camera device may choose to estimate the per-frame black 7699 level values either based on optically shielded black regions 7700 (android.sensor.opticalBlackRegions) or its internal model. 7701 7702 This key reports the camera device's estimated per-frame zero light 7703 value for each of the CFA mosaic channels in the camera sensor. The 7704 android.sensor.blackLevelPattern may only represent a coarse 7705 approximation of the actual black level values. This value is the 7706 black level used in the camera device's internal image processing pipeline 7707 and is generally more accurate than the fixed black level values. 7708 However, since they are estimated values by the camera device, they 7709 may not be as accurate as the black level values calculated from the 7710 optical black pixels reported by android.sensor.opticalBlackRegions. 7711 7712 The values are given in the same order as channels listed for the CFA 7713 layout key (see android.sensor.info.colorFilterArrangement), i.e. the 7714 nth value given corresponds to the black level offset for the nth 7715 color channel listed in the CFA. 7716 7717 This key will be available if android.sensor.opticalBlackRegions is available or the 7718 camera device advertises this key via {@link 7719 android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys|ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}. 7720 </details> 7721 <hal_details> 7722 The values are given in row-column scan order, with the first value 7723 corresponding to the element of the CFA in row=0, column=0. 7724 </hal_details> 7725 <tag id="RAW" /> 7726 </entry> 7727 <entry name="dynamicWhiteLevel" type="int32" visibility="public" 7728 optional="true" > 7729 <description> 7730 Maximum raw value output by sensor for this frame. 7731 </description> 7732 <range> &gt;= 0</range> 7733 <details> 7734 Since the android.sensor.blackLevelPattern may change for different 7735 capture settings (e.g., android.sensor.sensitivity), the white 7736 level will change accordingly. This key is similar to 7737 android.sensor.info.whiteLevel, but specifies the camera device 7738 estimated white level for each frame.
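As a rough illustration, an application processing raw buffers might prefer the per-frame estimates when they are reported, falling back to the static keys otherwise; `result` and `characteristics` below are assumed to be in scope:

    // Illustrative sketch: prefer per-frame black/white levels over the static ones.
    float[] dynamicBlack = result.get(CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL);
    Integer dynamicWhite = result.get(CaptureResult.SENSOR_DYNAMIC_WHITE_LEVEL);
    float black0 = (dynamicBlack != null)
        ? dynamicBlack[0]  // first CFA channel, same ordering as blackLevelPattern
        : characteristics.get(CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN)
            .getOffsetForIndex(0, 0);
    int white = (dynamicWhite != null)
        ? dynamicWhite
        : characteristics.get(CameraCharacteristics.SENSOR_INFO_WHITE_LEVEL);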
7739 7740 This key will be available if android.sensor.opticalBlackRegions is 7741 available or the camera device advertises this key via 7742 {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys|ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}. 7743 </details> 7744 <hal_details> 7745 The full bit depth of the sensor must be available in the raw data, 7746 so the value for linear sensors should not be significantly lower 7747 than the maximum raw value supported, i.e. 2^(sensor bits per pixel). 7748 </hal_details> 7749 <tag id="RAW" /> 7750 </entry> 7751 </dynamic> 7752 <static> 7753 <entry name="opaqueRawSize" type="int32" visibility="system" container="array"> 7754 <array> 7755 <size>n</size> 7756 <size>3</size> 7757 </array> 7758 <description>Size in bytes for all the listed opaque RAW buffer sizes</description> 7759 <range>Must be large enough to fit the opaque RAW of corresponding size produced by 7760 the camera</range> 7761 <details> 7762 These configurations are listed as `(width, height, size_in_bytes)` tuples. 7763 This is used for sizing the gralloc buffers for opaque RAW buffers. 7764 All RAW_OPAQUE output stream configurations listed in 7765 android.scaler.availableStreamConfigurations will have a corresponding tuple in 7766 this key. 7767 </details> 7768 <hal_details> 7769 This key is added in legacy HAL3.4. 7770 7771 For legacy HAL3.4 or above: devices advertising RAW_OPAQUE format output must list this 7772 key. For legacy HAL3.3 or earlier devices: if RAW_OPAQUE output is advertised, the camera 7773 framework will derive this key by assuming each pixel takes two bytes and no padding bytes 7774 between rows. 7775 </hal_details> 7776 </entry> 7777 </static> 7778 </section> 7779 <section name="shading"> 7780 <controls> 7781 <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full"> 7782 <enum> 7783 <value>OFF 7784 <notes>No lens shading correction is applied.</notes></value> 7785 <value>FAST 7786 <notes>Apply lens shading corrections, without slowing 7787 frame rate relative to sensor raw output</notes></value> 7788 <value>HIGH_QUALITY 7789 <notes>Apply high-quality lens shading correction, at the 7790 cost of possibly reduced frame rate.</notes></value> 7791 </enum> 7792 <description>Quality of lens shading correction applied 7793 to the image data.</description> 7794 <range>android.shading.availableModes</range> 7795 <details> 7796 When set to OFF mode, no lens shading correction will be applied by the 7797 camera device, and identity lens shading map data will be provided 7798 if `android.statistics.lensShadingMapMode == ON`. For example, for a lens 7799 shading map with size of `[ 4, 3 ]`, 7800 the output android.statistics.lensShadingCorrectionMap for this case will be an identity 7801 map shown below: 7802 7803 [ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 7804 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 7805 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 7806 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 7807 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 7808 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] 7809 7810 When set to other modes, lens shading correction will be applied by the camera 7811 device. Applications can request lens shading map data by setting 7812 android.statistics.lensShadingMapMode to ON, and then the camera device will provide lens 7813 shading map data in android.statistics.lensShadingCorrectionMap; the returned shading map 7814 data will be the one applied by the camera device for this capture request.
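As a minimal sketch (assuming the device lists HIGH_QUALITY in android.shading.availableModes and ON in android.statistics.info.availableLensShadingMapModes), an application might configure a request as follows:

    // Sketch only: enable lens shading correction and map reporting on a request.
    void requestShadingMap(CaptureRequest.Builder builder) {
        builder.set(CaptureRequest.SHADING_MODE,
                CameraMetadata.SHADING_MODE_HIGH_QUALITY);
        builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
                CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON);
    }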
7815 7816 The shading map data may depend on the auto-exposure (AE) and AWB statistics, therefore 7817 the reliability of the map data may be affected by the AE and AWB algorithms. When AE and 7818 AWB are in AUTO modes(android.control.aeMode `!=` OFF and android.control.awbMode `!=` 7819 OFF), to get best results, it is recommended that the applications wait for the AE and AWB 7820 to be converged before using the returned shading map data. 7821 </details> 7822 </entry> 7823 <entry name="strength" type="byte"> 7824 <description>Control the amount of shading correction 7825 applied to the images</description> 7826 <units>unitless: 1-10; 10 is full shading 7827 compensation</units> 7828 <tag id="FUTURE" /> 7829 </entry> 7830 </controls> 7831 <dynamic> 7832 <clone entry="android.shading.mode" kind="controls"> 7833 </clone> 7834 </dynamic> 7835 <static> 7836 <entry name="availableModes" type="byte" visibility="public" 7837 type_notes="List of enums (android.shading.mode)." container="array" 7838 typedef="enumList" hwlevel="legacy"> 7839 <array> 7840 <size>n</size> 7841 </array> 7842 <description> 7843 List of lens shading modes for android.shading.mode that are supported by this camera device. 7844 </description> 7845 <range>Any value listed in android.shading.mode</range> 7846 <details> 7847 This list contains lens shading modes that can be set for the camera device. 7848 Camera devices that support the MANUAL_POST_PROCESSING capability will always 7849 list OFF and FAST mode. This includes all FULL level devices. 7850 LEGACY devices will always only support FAST mode. 7851 </details> 7852 <hal_details> 7853 HAL must support both FAST and HIGH_QUALITY if lens shading correction control is 7854 available on the camera device, but the underlying implementation can be the same for 7855 both modes. That is, if the highest quality implementation on the camera device does not 7856 slow down capture rate, then FAST and HIGH_QUALITY will generate the same output. 7857 </hal_details> 7858 </entry> 7859 </static> 7860 </section> 7861 <section name="statistics"> 7862 <controls> 7863 <entry name="faceDetectMode" type="byte" visibility="public" enum="true" 7864 hwlevel="legacy"> 7865 <enum> 7866 <value>OFF 7867 <notes>Do not include face detection statistics in capture 7868 results.</notes></value> 7869 <value optional="true">SIMPLE 7870 <notes>Return face rectangle and confidence values only. 7871 </notes></value> 7872 <value optional="true">FULL 7873 <notes>Return all face 7874 metadata. 7875 7876 In this mode, face rectangles, scores, landmarks, and face IDs are all valid. 7877 </notes></value> 7878 </enum> 7879 <description>Operating mode for the face detector 7880 unit.</description> 7881 <range>android.statistics.info.availableFaceDetectModes</range> 7882 <details>Whether face detection is enabled, and whether it 7883 should output just the basic fields or the full set of 7884 fields.</details> 7885 <hal_details> 7886 SIMPLE mode must fill in android.statistics.faceRectangles and 7887 android.statistics.faceScores. 7888 FULL mode must also fill in android.statistics.faceIds, and 7889 android.statistics.faceLandmarks. 
7890 </hal_details> 7891 <tag id="BC" /> 7892 </entry> 7893 <entry name="histogramMode" type="byte" enum="true" typedef="boolean"> 7894 <enum> 7895 <value>OFF</value> 7896 <value>ON</value> 7897 </enum> 7898 <description>Operating mode for histogram 7899 generation</description> 7900 <tag id="FUTURE" /> 7901 </entry> 7902 <entry name="sharpnessMapMode" type="byte" enum="true" typedef="boolean"> 7903 <enum> 7904 <value>OFF</value> 7905 <value>ON</value> 7906 </enum> 7907 <description>Operating mode for sharpness map 7908 generation</description> 7909 <tag id="FUTURE" /> 7910 </entry> 7911 <entry name="hotPixelMapMode" type="byte" visibility="public" enum="true" 7912 typedef="boolean"> 7913 <enum> 7914 <value>OFF 7915 <notes>Hot pixel map production is disabled. 7916 </notes></value> 7917 <value>ON 7918 <notes>Hot pixel map production is enabled. 7919 </notes></value> 7920 </enum> 7921 <description> 7922 Operating mode for hot pixel map generation. 7923 </description> 7924 <range>android.statistics.info.availableHotPixelMapModes</range> 7925 <details> 7926 If set to `true`, a hot pixel map is returned in android.statistics.hotPixelMap. 7927 If set to `false`, no hot pixel map will be returned. 7928 </details> 7929 <tag id="V1" /> 7930 <tag id="RAW" /> 7931 </entry> 7932 </controls> 7933 <static> 7934 <namespace name="info"> 7935 <entry name="availableFaceDetectModes" type="byte" 7936 visibility="public" 7937 type_notes="List of enums from android.statistics.faceDetectMode" 7938 container="array" 7939 typedef="enumList" 7940 hwlevel="legacy"> 7941 <array> 7942 <size>n</size> 7943 </array> 7944 <description>List of face detection modes for android.statistics.faceDetectMode that are 7945 supported by this camera device. 7946 </description> 7947 <range>Any value listed in android.statistics.faceDetectMode</range> 7948 <details>OFF is always supported. 7949 </details> 7950 </entry> 7951 <entry name="histogramBucketCount" type="int32"> 7952 <description>Number of histogram buckets 7953 supported</description> 7954 <range>&gt;= 64</range> 7955 <tag id="FUTURE" /> 7956 </entry> 7957 <entry name="maxFaceCount" type="int32" visibility="public" hwlevel="legacy"> 7958 <description>The maximum number of simultaneously detectable 7959 faces.</description> 7960 <range>0 for cameras without available face detection; otherwise: 7961 `>=4` for LIMITED or FULL hwlevel devices or 7962 `>0` for LEGACY devices.</range> 7963 <tag id="BC" /> 7964 </entry> 7965 <entry name="maxHistogramCount" type="int32"> 7966 <description>Maximum value possible for a histogram 7967 bucket</description> 7968 <tag id="FUTURE" /> 7969 </entry> 7970 <entry name="maxSharpnessMapValue" type="int32"> 7971 <description>Maximum value possible for a sharpness map 7972 region.</description> 7973 <tag id="FUTURE" /> 7974 </entry> 7975 <entry name="sharpnessMapSize" type="int32" 7976 type_notes="width x height" container="array" typedef="size"> 7977 <array> 7978 <size>2</size> 7979 </array> 7980 <description>Dimensions of the sharpness 7981 map</description> 7982 <range>Must be at least 32 x 32</range> 7983 <tag id="FUTURE" /> 7984 </entry> 7985 <entry name="availableHotPixelMapModes" type="byte" visibility="public" 7986 type_notes="list of enums" container="array" typedef="boolean"> 7987 <array> 7988 <size>n</size> 7989 </array> 7990 <description> 7991 List of hot pixel map output modes for android.statistics.hotPixelMapMode that are 7992 supported by this camera device. 
7993 </description> 7994 <range>Any value listed in android.statistics.hotPixelMapMode</range> 7995 <details> 7996 If no hotpixel map output is available for this camera device, this will contain only 7997 `false`. 7998 7999 ON is always supported on devices with the RAW capability. 8000 </details> 8001 <tag id="V1" /> 8002 <tag id="RAW" /> 8003 </entry> 8004 <entry name="availableLensShadingMapModes" type="byte" visibility="public" 8005 type_notes="list of enums" container="array" typedef="enumList"> 8006 <array> 8007 <size>n</size> 8008 </array> 8009 <description> 8010 List of lens shading map output modes for android.statistics.lensShadingMapMode that 8011 are supported by this camera device. 8012 </description> 8013 <range>Any value listed in android.statistics.lensShadingMapMode</range> 8014 <details> 8015 If no lens shading map output is available for this camera device, this key will 8016 contain only OFF. 8017 8018 ON is always supported on devices with the RAW capability. 8019 LEGACY mode devices will always only support OFF. 8020 </details> 8021 </entry> 8022 <entry name="availableOisDataModes" type="byte" visibility="public" 8023 type_notes="list of enums" container="array" typedef="enumList" hal_version="3.3"> 8024 <array> 8025 <size>n</size> 8026 </array> 8027 <description> 8028 List of OIS data output modes for android.statistics.oisDataMode that 8029 are supported by this camera device. 8030 </description> 8031 <range>Any value listed in android.statistics.oisDataMode</range> 8032 <details> 8033 If no OIS data output is available for this camera device, this key will 8034 contain only OFF. 8035 </details> 8036 </entry> 8037 </namespace> 8038 </static> 8039 <dynamic> 8040 <clone entry="android.statistics.faceDetectMode" 8041 kind="controls"></clone> 8042 <entry name="faceIds" type="int32" visibility="ndk_public" 8043 container="array" hwlevel="legacy"> 8044 <array> 8045 <size>n</size> 8046 </array> 8047 <description>List of unique IDs for detected faces.</description> 8048 <details> 8049 Each detected face is given a unique ID that is valid for as long as the face is visible 8050 to the camera device. A face that leaves the field of view and later returns may be 8051 assigned a new ID. 8052 8053 Only available if android.statistics.faceDetectMode == FULL</details> 8054 <tag id="BC" /> 8055 </entry> 8056 <entry name="faceLandmarks" type="int32" visibility="ndk_public" 8057 type_notes="(leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouthX, mouthY)" 8058 container="array" hwlevel="legacy"> 8059 <array> 8060 <size>n</size> 8061 <size>6</size> 8062 </array> 8063 <description>List of landmarks for detected 8064 faces.</description> 8065 <details> 8066 The coordinate system is that of android.sensor.info.activeArraySize, with 8067 `(0, 0)` being the top-left pixel of the active array. 8068 8069 Only available if android.statistics.faceDetectMode == FULL</details> 8070 <tag id="BC" /> 8071 </entry> 8072 <entry name="faceRectangles" type="int32" visibility="ndk_public" 8073 type_notes="(xmin, ymin, xmax, ymax). (0,0) is top-left of active pixel area" 8074 container="array" typedef="rectangle" hwlevel="legacy"> 8075 <array> 8076 <size>n</size> 8077 <size>4</size> 8078 </array> 8079 <description>List of the bounding rectangles for detected 8080 faces.</description> 8081 <details> 8082 The coordinate system is that of android.sensor.info.activeArraySize, with 8083 `(0, 0)` being the top-left pixel of the active array. 
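As a sketch only (using the Java-facing android.statistics.faces key rather than this entry directly), an application might read the detected faces back in the same active array coordinate system:

    // Sketch: log detected faces; bounds are in active array coordinates.
    void logFaces(CaptureResult result) {
        Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
        if (faces == null) return;
        for (Face face : faces) {
            Rect bounds = face.getBounds(); // relative to activeArraySize, (0, 0) is top-left
            Log.d("Faces", "score=" + face.getScore() + " bounds=" + bounds);
        }
    }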
8084 8085 Only available if android.statistics.faceDetectMode != OFF</details> 8086 <ndk_details> 8087 The data representation is `int[4]`, which maps to `(left, top, width, height)`. 8088 </ndk_details> 8089 <tag id="BC" /> 8090 </entry> 8091 <entry name="faceScores" type="byte" visibility="ndk_public" 8092 container="array" hwlevel="legacy"> 8093 <array> 8094 <size>n</size> 8095 </array> 8096 <description>List of the face confidence scores for 8097 detected faces</description> 8098 <range>1-100</range> 8099 <details>Only available if android.statistics.faceDetectMode != OFF. 8100 </details> 8101 <hal_details> 8102 The value should be meaningful (for example, setting 100 at 8103 all times is illegal).</hal_details> 8104 <tag id="BC" /> 8105 </entry> 8106 <entry name="faces" type="int32" visibility="java_public" synthetic="true" 8107 container="array" typedef="face" hwlevel="legacy"> 8108 <array> 8109 <size>n</size> 8110 </array> 8111 <description>List of the faces detected through camera face detection 8112 in this capture.</description> 8113 <details> 8114 Only available if android.statistics.faceDetectMode `!=` OFF. 8115 </details> 8116 </entry> 8117 <entry name="histogram" type="int32" 8118 type_notes="count of pixels for each color channel that fall into each histogram bucket, scaled to be between 0 and maxHistogramCount" 8119 container="array"> 8120 <array> 8121 <size>n</size> 8122 <size>3</size> 8123 </array> 8124 <description>A 3-channel histogram based on the raw 8125 sensor data</description> 8126 <details>The k'th bucket (0-based) covers the input range 8127 (with w = android.sensor.info.whiteLevel) of [ k * w/N, 8128 (k + 1) * w / N ). If only a monochrome sharpness map is 8129 supported, all channels should have the same data</details> 8130 <tag id="FUTURE" /> 8131 </entry> 8132 <clone entry="android.statistics.histogramMode" 8133 kind="controls"></clone> 8134 <entry name="sharpnessMap" type="int32" 8135 type_notes="estimated sharpness for each region of the input image. Normalized to be between 0 and maxSharpnessMapValue. Higher values mean sharper (better focused)" 8136 container="array"> 8137 <array> 8138 <size>n</size> 8139 <size>m</size> 8140 <size>3</size> 8141 </array> 8142 <description>A 3-channel sharpness map, based on the raw 8143 sensor data</description> 8144 <details>If only a monochrome sharpness map is supported, 8145 all channels should have the same data</details> 8146 <tag id="FUTURE" /> 8147 </entry> 8148 <clone entry="android.statistics.sharpnessMapMode" 8149 kind="controls"></clone> 8150 <entry name="lensShadingCorrectionMap" type="byte" visibility="java_public" 8151 typedef="lensShadingMap" hwlevel="full"> 8152 <description>The shading map is a low-resolution floating-point map 8153 that lists the coefficients used to correct for vignetting, for each 8154 Bayer color channel.</description> 8155 <range>Each gain factor is &gt;= 1</range> 8156 <details> 8157 The map provided here is the same map that is used by the camera device to 8158 correct both color shading and vignetting for output non-RAW images. 
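As a sketch only (assuming android.statistics.lensShadingMapMode was set to ON for the request), the reported gains can be sampled as follows:

    // Sketch: read one gain factor from the reported lens shading map.
    float redGainAt(CaptureResult result, int column, int row) {
        LensShadingMap map =
                result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
        // column is in [0, getColumnCount()), row is in [0, getRowCount()).
        return map.getGainFactor(RggbChannelVector.RED, column, row);
    }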
8159 8160 When there is no lens shading correction applied to RAW 8161 output images (android.sensor.info.lensShadingApplied `==` 8162 false), this map is the complete lens shading correction 8163 map; when there is some lens shading correction applied to 8164 the RAW output image (android.sensor.info.lensShadingApplied 8165 `==` true), this map reports the remaining lens shading 8166 correction map that needs to be applied to get shading 8167 corrected images that match the camera device's output for 8168 non-RAW formats. 8169 8170 For a complete shading correction map, the least shaded 8171 section of the image will have a gain factor of 1; all 8172 other sections will have gains above 1. 8173 8174 When android.colorCorrection.mode = TRANSFORM_MATRIX, the map 8175 will take into account the colorCorrection settings. 8176 8177 The shading map is for the entire active pixel array, and is not 8178 affected by the crop region specified in the request. Each shading map 8179 entry is the value of the shading compensation map over a specific 8180 pixel on the sensor. Specifically, with a (N x M) resolution shading 8181 map, and an active pixel array size (W x H), shading map entry 8182 (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at 8183 pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels. 8184 The map is assumed to be bilinearly interpolated between the sample points. 8185 8186 The channel order is [R, Geven, Godd, B], where Geven is the green 8187 channel for the even rows of a Bayer pattern, and Godd is the odd rows. 8188 The shading map is stored in a fully interleaved format. 8189 8190 The shading map will generally have on the order of 30-40 rows and columns, 8191 and will be smaller than 64x64. 8192 8193 As an example, given a very small map defined as: 8194 8195 width,height = [ 4, 3 ] 8196 values = 8197 [ 1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2, 8198 1.1, 1.2, 1.2, 1.2, 1.3, 1.2, 1.3, 1.3, 8199 1.2, 1.2, 1.25, 1.1, 1.1, 1.1, 1.1, 1.0, 8200 1.0, 1.0, 1.0, 1.0, 1.2, 1.3, 1.25, 1.2, 8201 1.3, 1.2, 1.2, 1.3, 1.2, 1.15, 1.1, 1.2, 8202 1.2, 1.1, 1.0, 1.2, 1.3, 1.15, 1.2, 1.3 ] 8203 8204 The low-resolution scaling map images for each channel are 8205 (displayed using nearest-neighbor interpolation): 8206 8207  8208  8209  8210  8211 8212 As a visualization only, inverting the full-color map to recover an 8213 image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives: 8214 8215  8216 </details> 8217 </entry> 8218 <entry name="lensShadingMap" type="float" visibility="ndk_public" 8219 type_notes="2D array of float gain factors per channel to correct lens shading" 8220 container="array" hwlevel="full"> 8221 <array> 8222 <size>4</size> 8223 <size>n</size> 8224 <size>m</size> 8225 </array> 8226 <description>The shading map is a low-resolution floating-point map 8227 that lists the coefficients used to correct for vignetting and color shading, 8228 for each Bayer color channel of RAW image data.</description> 8229 <range>Each gain factor is &gt;= 1</range> 8230 <details> 8231 The map provided here is the same map that is used by the camera device to 8232 correct both color shading and vignetting for output non-RAW images. 
8233 8234 When there is no lens shading correction applied to RAW 8235 output images (android.sensor.info.lensShadingApplied `==` 8236 false), this map is the complete lens shading correction 8237 map; when there is some lens shading correction applied to 8238 the RAW output image (android.sensor.info.lensShadingApplied 8239 `==` true), this map reports the remaining lens shading 8240 correction map that needs to be applied to get shading 8241 corrected images that match the camera device's output for 8242 non-RAW formats. 8243 8244 For a complete shading correction map, the least shaded 8245 section of the image will have a gain factor of 1; all 8246 other sections will have gains above 1. 8247 8248 When android.colorCorrection.mode = TRANSFORM_MATRIX, the map 8249 will take into account the colorCorrection settings. 8250 8251 The shading map is for the entire active pixel array, and is not 8252 affected by the crop region specified in the request. Each shading map 8253 entry is the value of the shading compensation map over a specific 8254 pixel on the sensor. Specifically, with a (N x M) resolution shading 8255 map, and an active pixel array size (W x H), shading map entry 8256 (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at 8257 pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels. 8258 The map is assumed to be bilinearly interpolated between the sample points. 8259 8260 The channel order is [R, Geven, Godd, B], where Geven is the green 8261 channel for the even rows of a Bayer pattern, and Godd is the odd rows. 8262 The shading map is stored in a fully interleaved format, and its size 8263 is provided in the camera static metadata by android.lens.info.shadingMapSize. 8264 8265 The shading map will generally have on the order of 30-40 rows and columns, 8266 and will be smaller than 64x64. 8267 8268 As an example, given a very small map defined as: 8269 8270 android.lens.info.shadingMapSize = [ 4, 3 ] 8271 android.statistics.lensShadingMap = 8272 [ 1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2, 8273 1.1, 1.2, 1.2, 1.2, 1.3, 1.2, 1.3, 1.3, 8274 1.2, 1.2, 1.25, 1.1, 1.1, 1.1, 1.1, 1.0, 8275 1.0, 1.0, 1.0, 1.0, 1.2, 1.3, 1.25, 1.2, 8276 1.3, 1.2, 1.2, 1.3, 1.2, 1.15, 1.1, 1.2, 8277 1.2, 1.1, 1.0, 1.2, 1.3, 1.15, 1.2, 1.3 ] 8278 8279 The low-resolution scaling map images for each channel are 8280 (displayed using nearest-neighbor interpolation): 8281 8282  8283  8284  8285  8286 8287 As a visualization only, inverting the full-color map to recover an 8288 image of a gray wall (using bicubic interpolation for visual quality) 8289 as captured by the sensor gives: 8290 8291  8292 8293 Note that the RAW image data might be subject to lens shading 8294 correction not reported on this map. Query 8295 android.sensor.info.lensShadingApplied to see if RAW image data has subject 8296 to lens shading correction. If android.sensor.info.lensShadingApplied 8297 is TRUE, the RAW image data is subject to partial or full lens shading 8298 correction. In the case full lens shading correction is applied to RAW 8299 images, the gain factor map reported in this key will contain all 1.0 gains. 8300 In other words, the map reported in this key is the remaining lens shading 8301 that needs to be applied on the RAW image to get images without lens shading 8302 artifacts. See android.request.maxNumOutputRaw for a list of RAW image 8303 formats. 8304 </details> 8305 <hal_details> 8306 The lens shading map calculation may depend on exposure and white balance statistics. 
8307 When AE and AWB are in AUTO modes 8308 (android.control.aeMode `!=` OFF and android.control.awbMode `!=` OFF), the HAL 8309 may have all the information it needs to generate the most accurate lens shading map. When 8310 AE or AWB is in manual mode 8311 (android.control.aeMode `==` OFF or android.control.awbMode `==` OFF), the shading map 8312 may be adversely impacted by manual exposure or white balance parameters. To avoid 8313 generating unreliable shading map data, the HAL may choose to lock the shading map with 8314 the latest known good map generated when the AE and AWB are in AUTO modes. 8315 </hal_details> 8316 </entry> 8317 <entry name="predictedColorGains" type="float" 8318 visibility="hidden" 8319 deprecated="true" 8320 optional="true" 8321 type_notes="A 1D array of floats for 4 color channel gains" 8322 container="array"> 8323 <array> 8324 <size>4</size> 8325 </array> 8326 <description>The best-fit color channel gains calculated 8327 by the camera device's statistics units for the current output frame. 8328 </description> 8329 <deprecation_description> 8330 Never fully implemented or specified; do not use 8331 </deprecation_description> 8332 <details> 8333 This may be different than the gains used for this frame, 8334 since statistics processing on data from a new frame 8335 typically completes after the transform has already been 8336 applied to that frame. 8337 8338 The 4 channel gains are defined in the Bayer domain; 8339 see android.colorCorrection.gains for details. 8340 8341 This value should always be calculated by the auto-white balance (AWB) block, 8342 regardless of the android.control.* current values. 8343 </details> 8344 </entry> 8345 <entry name="predictedColorTransform" type="rational" 8346 visibility="hidden" 8347 deprecated="true" 8348 optional="true" 8349 type_notes="3x3 rational matrix in row-major order" 8350 container="array"> 8351 <array> 8352 <size>3</size> 8353 <size>3</size> 8354 </array> 8355 <description>The best-fit color transform matrix estimate 8356 calculated by the camera device's statistics units for the current 8357 output frame.</description> 8358 <deprecation_description> 8359 Never fully implemented or specified; do not use 8360 </deprecation_description> 8361 <details>The camera device will provide the estimate from its 8362 statistics unit on the white balance transforms to use 8363 for the next frame. These are the values the camera device believes 8364 are the best fit for the current output frame. This may 8365 be different than the transform used for this frame, since 8366 statistics processing on data from a new frame typically 8367 completes after the transform has already been applied to 8368 that frame. 8369 8370 These estimates must be provided for all frames, even if 8371 capture settings and color transforms are set by the application. 8372 8373 This value should always be calculated by the auto-white balance (AWB) block, 8374 regardless of the android.control.* current values.
8375 </details> 8376 </entry> 8377 <entry name="sceneFlicker" type="byte" visibility="public" enum="true" 8378 hwlevel="full"> 8379 <enum> 8380 <value>NONE 8381 <notes>The camera device does not detect any flickering illumination 8382 in the current scene.</notes></value> 8383 <value>50HZ 8384 <notes>The camera device detects illumination flickering at 50Hz 8385 in the current scene.</notes></value> 8386 <value>60HZ 8387 <notes>The camera device detects illumination flickering at 60Hz 8388 in the current scene.</notes></value> 8389 </enum> 8390 <description>The camera device estimated scene illumination lighting 8391 frequency.</description> 8392 <details> 8393 Many light sources, such as most fluorescent lights, flicker at a rate 8394 that depends on the local utility power standards. This flicker must be 8395 accounted for by auto-exposure routines to avoid artifacts in captured images. 8396 The camera device uses this entry to tell the application what the scene 8397 illuminant frequency is. 8398 8399 When manual exposure control is enabled 8400 (`android.control.aeMode == OFF` or `android.control.mode == 8401 OFF`), the android.control.aeAntibandingMode doesn't perform 8402 antibanding, and the application can ensure it selects 8403 exposure times that do not cause banding issues by looking 8404 into this metadata field. See 8405 android.control.aeAntibandingMode for more details. 8406 8407 Reports NONE if there doesn't appear to be flickering illumination. 8408 </details> 8409 </entry> 8410 <clone entry="android.statistics.hotPixelMapMode" kind="controls"> 8411 </clone> 8412 <entry name="hotPixelMap" type="int32" visibility="public" 8413 type_notes="list of coordinates based on android.sensor.pixelArraySize" 8414 container="array" typedef="point"> 8415 <array> 8416 <size>2</size> 8417 <size>n</size> 8418 </array> 8419 <description> 8420 List of `(x, y)` coordinates of hot/defective pixels on the sensor. 8421 </description> 8422 <range> 8423 n <= number of pixels on the sensor. 8424 The `(x, y)` coordinates must be bounded by 8425 android.sensor.info.pixelArraySize. 8426 </range> 8427 <details> 8428 A coordinate `(x, y)` must lie between `(0, 0)` and 8429 `(width - 1, height - 1)` (inclusive), which are the top-left and 8430 bottom-right of the pixel array, respectively. The width and 8431 height dimensions are given in android.sensor.info.pixelArraySize. 8432 This may include hot pixels that lie outside of the active array 8433 bounds given by android.sensor.info.activeArraySize. 8434 </details> 8435 <hal_details> 8436 A hot pixel map contains the coordinates of pixels on the camera 8437 sensor that do not report valid values (usually due to defects in 8438 the camera sensor). This includes pixels that are stuck at certain 8439 values, or have a response that does not accurately encode the 8440 incoming light from the scene. 8441 8442 To avoid performance issues, there should be significantly fewer hot 8443 pixels than actual pixels on the camera sensor.
8444 </hal_details> 8445 <tag id="V1" /> 8446 <tag id="RAW" /> 8447 </entry> 8448 </dynamic> 8449 <controls> 8450 <entry name="lensShadingMapMode" type="byte" visibility="public" enum="true" hwlevel="full"> 8451 <enum> 8452 <value>OFF 8453 <notes>Do not include a lens shading map in the capture result.</notes></value> 8454 <value>ON 8455 <notes>Include a lens shading map in the capture result.</notes></value> 8456 </enum> 8457 <description>Whether the camera device will output the lens 8458 shading map in output result metadata.</description> 8459 <range>android.statistics.info.availableLensShadingMapModes</range> 8460 <details>When set to ON, 8461 android.statistics.lensShadingMap will be provided in 8462 the output result metadata. 8463 8464 ON is always supported on devices with the RAW capability. 8465 </details> 8466 <tag id="RAW" /> 8467 </entry> 8468 </controls> 8469 <dynamic> 8470 <clone entry="android.statistics.lensShadingMapMode" kind="controls"> 8471 </clone> 8472 </dynamic> 8473 <controls> 8474 <entry name="oisDataMode" type="byte" visibility="public" enum="true" hal_version="3.3"> 8475 <enum> 8476 <value>OFF 8477 <notes>Do not include OIS data in the capture result.</notes></value> 8478 <value>ON 8479 <notes>Include OIS data in the capture result.</notes> 8480 <sdk_notes>android.statistics.oisSamples provides OIS sample data in the 8481 output result metadata. 8482 </sdk_notes> 8483 <ndk_notes>android.statistics.oisTimestamps, android.statistics.oisXShifts, 8484 and android.statistics.oisYShifts provide OIS data in the output result metadata. 8485 </ndk_notes> 8486 </value> 8487 </enum> 8488 <description>A control for selecting whether OIS position information is included in output 8489 result metadata.</description> 8490 <range>android.statistics.info.availableOisDataModes</range> 8491 </entry> 8492 </controls> 8493 <dynamic> 8494 <clone entry="android.statistics.oisDataMode" kind="controls"> 8495 </clone> 8496 <entry name="oisTimestamps" type="int64" visibility="ndk_public" container="array" hal_version="3.3"> 8497 <array> 8498 <size>n</size> 8499 </array> 8500 <description> 8501 An array of timestamps of OIS samples, in nanoseconds. 8502 </description> 8503 <units>nanoseconds</units> 8504 <details> 8505 The array contains the timestamps of OIS samples. The timestamps are in the same 8506 timebase as and comparable to android.sensor.timestamp. 8507 </details> 8508 </entry> 8509 <entry name="oisXShifts" type="float" visibility="ndk_public" container="array" hal_version="3.3"> 8510 <array> 8511 <size>n</size> 8512 </array> 8513 <description> 8514 An array of shifts of OIS samples, in x direction. 8515 </description> 8516 <units>Pixels in active array.</units> 8517 <details> 8518 The array contains the amount of shifts in x direction, in pixels, based on OIS samples. 8519 A positive value is a shift from left to right in active array coordinate system. For 8520 example, if the optical center is (1000, 500) in active array coordinates, a shift of 8521 (3, 0) puts the new optical center at (1003, 500). 8522 8523 The number of shifts must match the number of timestamps in 8524 android.statistics.oisTimestamps. 8525 </details> 8526 </entry> 8527 <entry name="oisYShifts" type="float" visibility="ndk_public" container="array" hal_version="3.3"> 8528 <array> 8529 <size>n</size> 8530 </array> 8531 <description> 8532 An array of shifts of OIS samples, in y direction. 
8533 </description> 8534 <units>Pixels in active array.</units> 8535 <details> 8536 The array contains the amount of shifts in y direction, in pixels, based on OIS samples. 8537 A positive value is a shift from top to bottom in active array coordinate system. For 8538 example, if the optical center is (1000, 500) in active array coordinates, a shift of 8539 (0, 5) puts the new optical center at (1000, 505). 8540 8541 The number of shifts must match the number of timestamps in 8542 android.statistics.oisTimestamps. 8543 </details> 8544 </entry> 8545 <entry name="oisSamples" type="float" visibility="java_public" synthetic="true" 8546 container="array" typedef="oisSample" hal_version="3.3"> 8547 <array> 8548 <size>n</size> 8549 </array> 8550 <description> 8551 An array of OIS samples. 8552 </description> 8553 <details> 8554 Each OIS sample contains the timestamp and the amount of shifts in x and y direction, 8555 in pixels, of the OIS sample. 8556 8557 A positive value for a shift in x direction is a shift from left to right in active array 8558 coordinate system. For example, if the optical center is (1000, 500) in active array 8559 coordinates, a shift of (3, 0) puts the new optical center at (1003, 500). 8560 8561 A positive value for a shift in y direction is a shift from top to bottom in active array 8562 coordinate system. For example, if the optical center is (1000, 500) in active array 8563 coordinates, a shift of (0, 5) puts the new optical center at (1000, 505). 8564 </details> 8565 </entry> 8566 </dynamic> 8567 </section> 8568 <section name="tonemap"> 8569 <controls> 8570 <entry name="curveBlue" type="float" visibility="ndk_public" 8571 type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints." 8572 container="array" hwlevel="full"> 8573 <array> 8574 <size>n</size> 8575 <size>2</size> 8576 </array> 8577 <description>Tonemapping / contrast / gamma curve for the blue 8578 channel, to use when android.tonemap.mode is 8579 CONTRAST_CURVE.</description> 8580 <details>See android.tonemap.curveRed for more details.</details> 8581 </entry> 8582 <entry name="curveGreen" type="float" visibility="ndk_public" 8583 type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints." 8584 container="array" hwlevel="full"> 8585 <array> 8586 <size>n</size> 8587 <size>2</size> 8588 </array> 8589 <description>Tonemapping / contrast / gamma curve for the green 8590 channel, to use when android.tonemap.mode is 8591 CONTRAST_CURVE.</description> 8592 <details>See android.tonemap.curveRed for more details.</details> 8593 </entry> 8594 <entry name="curveRed" type="float" visibility="ndk_public" 8595 type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints." 8596 container="array" hwlevel="full"> 8597 <array> 8598 <size>n</size> 8599 <size>2</size> 8600 </array> 8601 <description>Tonemapping / contrast / gamma curve for the red 8602 channel, to use when android.tonemap.mode is 8603 CONTRAST_CURVE.</description> 8604 <range>0-1 on both input and output coordinates, normalized 8605 as a floating-point value such that 0 == black and 1 == white. 
8606 </range> 8607 <details> 8608 Each channel's curve is defined by an array of control points: 8609 8610 android.tonemap.curveRed = 8611 [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ] 8612 2 <= N <= android.tonemap.maxCurvePoints 8613 8614 These are sorted in order of increasing `Pin`; it is 8615 required that input values 0.0 and 1.0 are included in the list to 8616 define a complete mapping. For input values between control points, 8617 the camera device must linearly interpolate between the control 8618 points. 8619 8620 Each curve can have an independent number of points, and the number 8621 of points can be less than max (that is, the request doesn't have to 8622 always provide a curve with number of points equivalent to 8623 android.tonemap.maxCurvePoints). 8624 8625 For devices with MONOCHROME capability, only red channel is used. Green and blue channels 8626 are ignored. 8627 8628 A few examples, and their corresponding graphical mappings; these 8629 only specify the red channel and the precision is limited to 4 8630 digits, for conciseness. 8631 8632 Linear mapping: 8633 8634 android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ] 8635 8636  8637 8638 Invert mapping: 8639 8640 android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ] 8641 8642  8643 8644 Gamma 1/2.2 mapping, with 16 control points: 8645 8646 android.tonemap.curveRed = [ 8647 0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812, 8648 0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072, 8649 0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685, 8650 0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ] 8651 8652  8653 8654 Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points: 8655 8656 android.tonemap.curveRed = [ 8657 0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845, 8658 0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130, 8659 0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721, 8660 0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ] 8661 8662  8663 </details> 8664 <hal_details> 8665 For good quality of mapping, at least 128 control points are 8666 preferred. 8667 8668 A typical use case of this would be a gamma-1/2.2 curve, with as many 8669 control points used as are available. 8670 </hal_details> 8671 </entry> 8672 <entry name="curve" type="float" visibility="java_public" synthetic="true" 8673 typedef="tonemapCurve" 8674 hwlevel="full"> 8675 <description>Tonemapping / contrast / gamma curve to use when android.tonemap.mode 8676 is CONTRAST_CURVE.</description> 8677 <details> 8678 The tonemapCurve consist of three curves for each of red, green, and blue 8679 channels respectively. The following example uses the red channel as an 8680 example. The same logic applies to green and blue channel. 8681 Each channel's curve is defined by an array of control points: 8682 8683 curveRed = 8684 [ P0(in, out), P1(in, out), P2(in, out), P3(in, out), ..., PN(in, out) ] 8685 2 <= N <= android.tonemap.maxCurvePoints 8686 8687 These are sorted in order of increasing `Pin`; it is always 8688 guaranteed that input values 0.0 and 1.0 are included in the list to 8689 define a complete mapping. For input values between control points, 8690 the camera device must linearly interpolate between the control 8691 points. 
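As a minimal sketch (assuming CONTRAST_CURVE is listed in android.tonemap.availableToneMapModes), an application could install a simple linear curve on all three channels:

    // Sketch only: apply an application-defined (here, linear) tonemap curve.
    void setLinearCurve(CaptureRequest.Builder builder) {
        float[] linear = { 0.0f, 0.0f, 1.0f, 1.0f }; // (Pin, Pout) pairs: includes 0.0 and 1.0
        TonemapCurve curve = new TonemapCurve(linear, linear, linear);
        builder.set(CaptureRequest.TONEMAP_MODE,
                CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE);
        builder.set(CaptureRequest.TONEMAP_CURVE, curve);
    }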
8692 8693 Each curve can have an independent number of points, and the number 8694 of points can be less than max (that is, the request doesn't have to 8695 always provide a curve with a number of points equal to 8696 android.tonemap.maxCurvePoints). 8697 8698 For devices with MONOCHROME capability, only the red channel is used. Green and blue channels 8699 are ignored. 8700 8701 A few examples, and their corresponding graphical mappings; these 8702 only specify the red channel and the precision is limited to 4 8703 digits, for conciseness. 8704 8705 Linear mapping: 8706 8707 curveRed = [ (0, 0), (1.0, 1.0) ] 8708 8709  8710 8711 Invert mapping: 8712 8713 curveRed = [ (0, 1.0), (1.0, 0) ] 8714 8715  8716 8717 Gamma 1/2.2 mapping, with 16 control points: 8718 8719 curveRed = [ 8720 (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812), 8721 (0.2667, 0.5484), (0.3333, 0.6069), (0.4000, 0.6594), (0.4667, 0.7072), 8722 (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685), 8723 (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ] 8724 8725  8726 8727 Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points: 8728 8729 curveRed = [ 8730 (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845), 8731 (0.2667, 0.5532), (0.3333, 0.6125), (0.4000, 0.6652), (0.4667, 0.7130), 8732 (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721), 8733 (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ] 8734 8735  8736 </details> 8737 <hal_details> 8738 This entry is created by the framework from the curveRed, curveGreen and 8739 curveBlue entries. 8740 </hal_details> 8741 </entry> 8742 <entry name="mode" type="byte" visibility="public" enum="true" 8743 hwlevel="full"> 8744 <enum> 8745 <value>CONTRAST_CURVE 8746 <notes>Use the tone mapping curve specified in 8747 the android.tonemap.curve* entries. 8748 8749 All color enhancement and tonemapping must be disabled, except 8750 for applying the tonemapping curve specified by 8751 android.tonemap.curve. 8752 8753 Must not slow down frame rate relative to raw 8754 sensor output. 8755 </notes> 8756 </value> 8757 <value>FAST 8758 <notes> 8759 Advanced gamma mapping and color enhancement may be applied, without 8760 reducing frame rate compared to raw sensor output. 8761 </notes> 8762 </value> 8763 <value>HIGH_QUALITY 8764 <notes> 8765 High-quality gamma mapping and color enhancement will be applied, at 8766 the cost of possibly reduced frame rate compared to raw sensor output. 8767 </notes> 8768 </value> 8769 <value>GAMMA_VALUE 8770 <notes> 8771 Use the gamma value specified in android.tonemap.gamma to perform 8772 tonemapping. 8773 8774 All color enhancement and tonemapping must be disabled, except 8775 for applying the tonemapping curve specified by android.tonemap.gamma. 8776 8777 Must not slow down frame rate relative to raw sensor output. 8778 </notes> 8779 </value> 8780 <value>PRESET_CURVE 8781 <notes> 8782 Use the preset tonemapping curve specified in 8783 android.tonemap.presetCurve to perform tonemapping. 8784 8785 All color enhancement and tonemapping must be disabled, except 8786 for applying the tonemapping curve specified by 8787 android.tonemap.presetCurve. 8788 8789 Must not slow down frame rate relative to raw sensor output. 8790 </notes> 8791 </value> 8792 </enum> 8793 <description>High-level global contrast/gamma/tonemapping control.
8794 </description> 8795 <range>android.tonemap.availableToneMapModes</range> 8796 <details> 8797 When switching to an application-defined contrast curve by setting 8798 android.tonemap.mode to CONTRAST_CURVE, the curve is defined 8799 per-channel with a set of `(in, out)` points that specify the 8800 mapping from input high-bit-depth pixel value to the output 8801 low-bit-depth value. Since the actual pixel ranges of both input 8802 and output may change depending on the camera pipeline, the values 8803 are specified by normalized floating-point numbers. 8804 8805 More-complex color mapping operations such as 3D color look-up 8806 tables, selective chroma enhancement, or other non-linear color 8807 transforms will be disabled when android.tonemap.mode is 8808 CONTRAST_CURVE. 8809 8810 When using either FAST or HIGH_QUALITY, the camera device will 8811 emit its own tonemap curve in android.tonemap.curve. 8812 These values are always available, and as close as possible to the 8813 actually used nonlinear/nonglobal transforms. 8814 8815 If a request is sent with CONTRAST_CURVE with the camera device's 8816 provided curve in FAST or HIGH_QUALITY, the image's tonemap will be 8817 roughly the same.</details> 8818 </entry> 8819 </controls> 8820 <static> 8821 <entry name="maxCurvePoints" type="int32" visibility="public" 8822 hwlevel="full"> 8823 <description>Maximum number of supported points in the 8824 tonemap curve that can be used for android.tonemap.curve. 8825 </description> 8826 <details> 8827 If the actual number of points provided by the application (in android.tonemap.curve*) is 8828 less than this maximum, the camera device will resample the curve to its internal 8829 representation, using linear interpolation. 8830 8831 The output curves in the result metadata may have a different number 8832 of points than the input curves, and will represent the actual 8833 hardware curves used as closely as possible when linearly interpolated. 8834 </details> 8835 <hal_details> 8836 This value must be at least 64. This should be at least 128. 8837 </hal_details> 8838 </entry> 8839 <entry name="availableToneMapModes" type="byte" visibility="public" 8840 type_notes="list of enums" container="array" typedef="enumList" hwlevel="full"> 8841 <array> 8842 <size>n</size> 8843 </array> 8844 <description> 8845 List of tonemapping modes for android.tonemap.mode that are supported by this camera 8846 device. 8847 </description> 8848 <range>Any value listed in android.tonemap.mode</range> 8849 <details> 8850 Camera devices that support the MANUAL_POST_PROCESSING capability will always contain 8851 at least one of below mode combinations: 8852 8853 * CONTRAST_CURVE, FAST and HIGH_QUALITY 8854 * GAMMA_VALUE, PRESET_CURVE, FAST and HIGH_QUALITY 8855 8856 This includes all FULL level devices. 8857 </details> 8858 <hal_details> 8859 HAL must support both FAST and HIGH_QUALITY if automatic tonemap control is available 8860 on the camera device, but the underlying implementation can be the same for both modes. 8861 That is, if the highest quality implementation on the camera device does not slow down 8862 capture rate, then FAST and HIGH_QUALITY will generate the same output. 
8863 </hal_details> 8864 </entry> 8865 </static> 8866 <dynamic> 8867 <clone entry="android.tonemap.curveBlue" kind="controls"> 8868 </clone> 8869 <clone entry="android.tonemap.curveGreen" kind="controls"> 8870 </clone> 8871 <clone entry="android.tonemap.curveRed" kind="controls"> 8872 </clone> 8873 <clone entry="android.tonemap.curve" kind="controls"> 8874 </clone> 8875 <clone entry="android.tonemap.mode" kind="controls"> 8876 </clone> 8877 </dynamic> 8878 <controls> 8879 <entry name="gamma" type="float" visibility="public"> 8880 <description> Tonemapping curve to use when android.tonemap.mode is 8881 GAMMA_VALUE 8882 </description> 8883 <details> 8884 The tonemap curve will be defined by the following formula: 8885 * OUT = pow(IN, 1.0 / gamma) 8886 where IN and OUT are the input and output pixel values scaled to the range [0.0, 1.0], 8887 pow is the power function and gamma is the gamma value specified by this 8888 key. 8889 8890 The same curve will be applied to all color channels. The camera device 8891 may clip the input gamma value to its supported range. The actual applied 8892 value will be returned in the capture result. 8893 8894 The valid range of the gamma value varies between devices, but values 8895 within [1.0, 5.0] are guaranteed not to be clipped. 8896 </details> 8897 </entry> 8898 <entry name="presetCurve" type="byte" visibility="public" enum="true"> 8899 <enum> 8900 <value>SRGB 8901 <notes>Tonemapping curve is defined by sRGB</notes> 8902 </value> 8903 <value>REC709 8904 <notes>Tonemapping curve is defined by ITU-R BT.709</notes> 8905 </value> 8906 </enum> 8907 <description> Tonemapping curve to use when android.tonemap.mode is 8908 PRESET_CURVE 8909 </description> 8910 <details> 8911 The tonemap curve will be defined by the specified standard. 8912 8913 sRGB (approximated by 16 control points): 8914 8915  8916 8917 Rec. 709 (approximated by 16 control points): 8918 8919  8920 8921 Note that the above figures show a 16-control-point approximation of the preset 8922 curves. Camera devices may apply a different approximation to the curve. 8923 </details> 8924 </entry> 8925 </controls> 8926 <dynamic> 8927 <clone entry="android.tonemap.gamma" kind="controls"> 8928 </clone> 8929 <clone entry="android.tonemap.presetCurve" kind="controls"> 8930 </clone> 8931 </dynamic> 8932 </section> 8933 <section name="led"> 8934 <controls> 8935 <entry name="transmit" type="byte" visibility="hidden" optional="true" 8936 enum="true" typedef="boolean"> 8937 <enum> 8938 <value>OFF</value> 8939 <value>ON</value> 8940 </enum> 8941 <description>This LED is nominally used to indicate to the user 8942 that the camera is powered on and may be streaming images back to the 8943 Application Processor. In certain rare circumstances, the OS may 8944 disable this when video is processed locally and not transmitted to 8945 any untrusted applications. 8946 8947 In particular, the LED *must* always be on when the data could be 8948 transmitted off the device. The LED *should* always be on whenever 8949 data is stored locally on the device. 8950 8951 The LED *may* be off if a trusted application is using the data in a way that 8952 doesn't violate the above rules.
8953 </description> 8954 </entry> 8955 </controls> 8956 <dynamic> 8957 <clone entry="android.led.transmit" kind="controls"></clone> 8958 </dynamic> 8959 <static> 8960 <entry name="availableLeds" type="byte" visibility="hidden" optional="true" 8961 enum="true" 8962 container="array"> 8963 <array> 8964 <size>n</size> 8965 </array> 8966 <enum> 8967 <value>TRANSMIT 8968 <notes>android.led.transmit control is used.</notes> 8969 </value> 8970 </enum> 8971 <description>A list of camera LEDs that are available on this system. 8972 </description> 8973 </entry> 8974 </static> 8975 </section> 8976 <section name="info"> 8977 <static> 8978 <entry name="supportedHardwareLevel" type="byte" visibility="public" 8979 enum="true" hwlevel="legacy"> 8980 <enum> 8981 <value> 8982 LIMITED 8983 <notes> 8984 This camera device does not have enough capabilities to qualify as a `FULL` device or 8985 better. 8986 8987 Only the stream configurations listed in the `LEGACY` and `LIMITED` tables in the 8988 {@link android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession 8989 createCaptureSession} documentation are guaranteed to be supported. 8990 8991 All `LIMITED` devices support the `BACKWARDS_COMPATIBLE` capability, indicating basic 8992 support for color image capture. The only exception is that the device may 8993 alternatively support only the `DEPTH_OUTPUT` capability, if it can only output depth 8994 measurements and not color images. 8995 8996 `LIMITED` devices and above require the use of android.control.aePrecaptureTrigger 8997 to lock exposure metering (and calculate flash power, for cameras with flash) before 8998 capturing a high-quality still image. 8999 9000 A `LIMITED` device that only lists the `BACKWARDS_COMPATIBLE` capability is only 9001 required to support full-automatic operation and post-processing (`OFF` is not 9002 supported for android.control.aeMode, android.control.afMode, or 9003 android.control.awbMode) 9004 9005 Additional capabilities may optionally be supported by a `LIMITED`-level device, and 9006 can be checked for in android.request.availableCapabilities. 9007 </notes> 9008 </value> 9009 <value> 9010 FULL 9011 <notes> 9012 This camera device is capable of supporting advanced imaging applications. 9013 9014 The stream configurations listed in the `FULL`, `LEGACY` and `LIMITED` tables in the 9015 {@link android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession 9016 createCaptureSession} documentation are guaranteed to be supported. 9017 9018 A `FULL` device will support below capabilities: 9019 9020 * `BURST_CAPTURE` capability (android.request.availableCapabilities contains 9021 `BURST_CAPTURE`) 9022 * Per frame control (android.sync.maxLatency `==` PER_FRAME_CONTROL) 9023 * Manual sensor control (android.request.availableCapabilities contains `MANUAL_SENSOR`) 9024 * Manual post-processing control (android.request.availableCapabilities contains 9025 `MANUAL_POST_PROCESSING`) 9026 * The required exposure time range defined in android.sensor.info.exposureTimeRange 9027 * The required maxFrameDuration defined in android.sensor.info.maxFrameDuration 9028 9029 Note: 9030 Pre-API level 23, FULL devices also supported arbitrary cropping region 9031 (android.scaler.croppingType `== FREEFORM`); this requirement was relaxed in API level 9032 23, and `FULL` devices may only support `CENTERED` cropping. 
9033 </notes> 9034 </value> 9035 <value> 9036 LEGACY 9037 <notes> 9038 This camera device is running in backward compatibility mode. 9039 9040 Only the stream configurations listed in the `LEGACY` table in the {@link 9041 android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession 9042 createCaptureSession} documentation are supported. 9043 9044 A `LEGACY` device does not support per-frame control, manual sensor control, manual 9045 post-processing, arbitrary cropping regions, and has relaxed performance constraints. 9046 No additional capabilities beyond `BACKWARD_COMPATIBLE` will ever be listed by a 9047 `LEGACY` device in android.request.availableCapabilities. 9048 9049 In addition, the android.control.aePrecaptureTrigger is not functional on `LEGACY` 9050 devices. Instead, every request that includes a JPEG-format output target is treated 9051 as triggering a still capture, internally executing a precapture trigger. This may 9052 fire the flash for flash power metering during precapture, and then fire the flash 9053 for the final capture, if a flash is available on the device and the AE mode is set to 9054 enable the flash. 9055 </notes> 9056 </value> 9057 <value> 9058 3 9059 <notes> 9060 This camera device is capable of YUV reprocessing and RAW data capture, in addition to 9061 FULL-level capabilities. 9062 9063 The stream configurations listed in the `LEVEL_3`, `RAW`, `FULL`, `LEGACY` and 9064 `LIMITED` tables in the {@link 9065 android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession 9066 createCaptureSession} documentation are guaranteed to be supported. 9067 9068 The following additional capabilities are guaranteed to be supported: 9069 9070 * `YUV_REPROCESSING` capability (android.request.availableCapabilities contains 9071 `YUV_REPROCESSING`) 9072 * `RAW` capability (android.request.availableCapabilities contains 9073 `RAW`) 9074 </notes> 9075 </value> 9076 <value hal_version="3.3"> 9077 EXTERNAL 9078 <notes> 9079 This camera device is backed by an external camera connected to this Android device. 9080 9081 The device has capability identical to a LIMITED level device, with the following 9082 exceptions: 9083 9084 * The device may not report lens/sensor related information such as 9085 - android.lens.focalLength 9086 - android.lens.info.hyperfocalDistance 9087 - android.sensor.info.physicalSize 9088 - android.sensor.info.whiteLevel 9089 - android.sensor.blackLevelPattern 9090 - android.sensor.info.colorFilterArrangement 9091 - android.sensor.rollingShutterSkew 9092 * The device will report 0 for android.sensor.orientation 9093 * The device has less guarantee on stable framerate, as the framerate partly depends 9094 on the external camera being used. 9095 </notes> 9096 </value> 9097 </enum> 9098 <description> 9099 Generally classifies the overall set of the camera device functionality. 9100 </description> 9101 <details> 9102 The supported hardware level is a high-level description of the camera device's 9103 capabilities, summarizing several capabilities into one field. Each level adds additional 9104 features to the previous one, and is always a strict superset of the previous level. 9105 The ordering is `LEGACY < LIMITED < FULL < LEVEL_3`. 9106 9107 Starting from `LEVEL_3`, the level enumerations are guaranteed to be in increasing 9108 numerical value as well. 
To check if a given device is at least at a given hardware level, 9109 the following code snippet can be used: 9110 9111 // Returns true if the device supports the required hardware level, or better. 9112 boolean isHardwareLevelSupported(CameraCharacteristics c, int requiredLevel) { 9113 int deviceLevel = c.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); 9114 if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) { 9115 return requiredLevel == deviceLevel; 9116 } 9117 // deviceLevel is not LEGACY, can use numerical sort 9118 return requiredLevel <= deviceLevel; 9119 } 9120 9121 At a high level, the levels are: 9122 9123 * `LEGACY` devices operate in a backwards-compatibility mode for older 9124 Android devices, and have very limited capabilities. 9125 * `LIMITED` devices represent the 9126 baseline feature set, and may also include additional capabilities that are 9127 subsets of `FULL`. 9128 * `FULL` devices additionally support per-frame manual control of sensor, flash, lens and 9129 post-processing settings, and image capture at a high rate. 9130 * `LEVEL_3` devices additionally support YUV reprocessing and RAW image capture, along 9131 with additional output stream configurations. 9132 9133 See the individual level enums for full descriptions of the supported capabilities. The 9134 android.request.availableCapabilities entry describes the device's capabilities at a 9135 finer-grain level, if needed. In addition, many controls have their available settings or 9136 ranges defined in individual entries from {@link 9137 android.hardware.camera2.CameraCharacteristics|ACameraManager_getCameraCharacteristics}. 9138 9139 Some features are not part of any particular hardware level or capability and must be 9140 queried separately. These include: 9141 9142 * Calibrated timestamps (android.sensor.info.timestampSource `==` REALTIME) 9143 * Precision lens control (android.lens.info.focusDistanceCalibration `==` CALIBRATED) 9144 * Face detection (android.statistics.info.availableFaceDetectModes) 9145 * Optical or electrical image stabilization 9146 (android.lens.info.availableOpticalStabilization, 9147 android.control.availableVideoStabilizationModes) 9148 9149 </details> 9150 <hal_details> 9151 A camera HALv3 device can implement one of three possible operational modes: LIMITED, 9152 FULL, and LEVEL_3. 9153 9154 FULL support or better is expected from new higher-end devices. Limited 9155 mode has hardware requirements roughly in line with those for a camera HAL device v1 9156 implementation, and is expected from older or inexpensive devices. Each level is a strict 9157 superset of the previous level, and they share the same essential operational flow. 9158 9159 For full details refer to "S3. Operational Modes" in camera3.h. 9160 9161 Camera HAL3+ must not implement LEGACY mode. It is there for backwards compatibility in 9162 the `android.hardware.camera2` user-facing API only on legacy HALv1 devices, and is 9163 implemented by the camera framework code. 9164 9165 EXTERNAL level devices have a lower performance bar in CTS since the performance might depend 9166 on the external camera being used and is not fully controlled by the device manufacturer. 9167 The ITS test suite is exempted for the same reason. 9168 </hal_details> 9169 </entry> 9170 <entry name="version" type="byte" visibility="public" typedef="string" hal_version="3.3"> 9171 <description> 9172 A short string for manufacturer version information about the camera device, such as 9173 ISP hardware, sensors, etc.
9174 </description> 9175 <details> 9176 This can be used in {@link android.media.ExifInterface#TAG_IMAGE_DESCRIPTION TAG_IMAGE_DESCRIPTION} 9177 in jpeg EXIF. This key may be absent if no version information is available on the 9178 device. 9179 </details> 9180 <hal_details> 9181 The string must consist of only alphanumeric characters, punctuation, and 9182 whitespace, i.e. it must match regular expression "[\p{Alnum}\p{Punct}\p{Space}]*". 9183 It must not exceed 256 characters. 9184 </hal_details> 9185 </entry> 9186 </static> 9187 </section> 9188 <section name="blackLevel"> 9189 <controls> 9190 <entry name="lock" type="byte" visibility="public" enum="true" 9191 typedef="boolean" hwlevel="full"> 9192 <enum> 9193 <value>OFF</value> 9194 <value>ON</value> 9195 </enum> 9196 <description> Whether black-level compensation is locked 9197 to its current values, or is free to vary.</description> 9198 <details>When set to `true` (ON), the values used for black-level 9199 compensation will not change until the lock is set to 9200 `false` (OFF). 9201 9202 Since changes to certain capture parameters (such as 9203 exposure time) may require resetting of black level 9204 compensation, the camera device must report whether setting 9205 the black level lock was successful in the output result 9206 metadata. 9207 9208 For example, if a sequence of requests is as follows: 9209 9210 * Request 1: Exposure = 10ms, Black level lock = OFF 9211 * Request 2: Exposure = 10ms, Black level lock = ON 9212 * Request 3: Exposure = 10ms, Black level lock = ON 9213 * Request 4: Exposure = 20ms, Black level lock = ON 9214 * Request 5: Exposure = 20ms, Black level lock = ON 9215 * Request 6: Exposure = 20ms, Black level lock = ON 9216 9217 And the exposure change in Request 4 requires the camera 9218 device to reset the black level offsets, then the output 9219 result metadata is expected to be: 9220 9221 * Result 1: Exposure = 10ms, Black level lock = OFF 9222 * Result 2: Exposure = 10ms, Black level lock = ON 9223 * Result 3: Exposure = 10ms, Black level lock = ON 9224 * Result 4: Exposure = 20ms, Black level lock = OFF 9225 * Result 5: Exposure = 20ms, Black level lock = ON 9226 * Result 6: Exposure = 20ms, Black level lock = ON 9227 9228 This indicates to the application that on frame 4, black 9229 levels were reset due to exposure value changes, and pixel 9230 values may not be consistent across captures. 9231 9232 The camera device will maintain the lock to the extent 9233 possible, only overriding the lock to OFF when changes to 9234 other request parameters require a black level recalculation 9235 or reset. 9236 </details> 9237 <hal_details> 9238 If for some reason black level locking is no longer possible 9239 (for example, the analog gain has changed, which forces 9240 black level offsets to be recalculated), then the HAL must 9241 override this request (and it must report 'OFF' when this 9242 does happen) until the next capture for which locking is 9243 possible again.</hal_details> 9244 <tag id="HAL2" /> 9245 </entry> 9246 </controls> 9247 <dynamic> 9248 <clone entry="android.blackLevel.lock" 9249 kind="controls"> 9250 <details> 9251 Whether the black level offset was locked for this frame. Should be 9252 ON if android.blackLevel.lock was ON in the capture request, unless 9253 a change in other capture settings forced the camera device to 9254 perform a black level reset. 
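For illustration, an application that needs consistent black levels across a burst might lock the black level in its requests and then check the value returned in each result to detect resets. The snippet below is a minimal sketch only (it assumes an existing capture session `session`, request builder `previewBuilder`, and `handler`; exception handling is omitted):

    // Request that black level compensation stay fixed, then watch each result to
    // see whether the device had to reset it (for example after an exposure change).
    previewBuilder.set(CaptureRequest.BLACK_LEVEL_LOCK, true);
    session.setRepeatingRequest(previewBuilder.build(),
            new CameraCaptureSession.CaptureCallback() {
                @Override
                public void onCaptureCompleted(CameraCaptureSession s,
                        CaptureRequest request, TotalCaptureResult result) {
                    Boolean locked = result.get(CaptureResult.BLACK_LEVEL_LOCK);
                    if (locked != null && !locked) {
                        // Black level compensation was reset for this frame; pixel
                        // values may not be comparable with earlier frames.
                    }
                }
            }, handler);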
9255 </details> 9256 </clone> 9257 </dynamic> 9258 </section> 9259 <section name="sync"> 9260 <dynamic> 9261 <entry name="frameNumber" type="int64" visibility="ndk_public" 9262 enum="true" hwlevel="legacy"> 9263 <enum> 9264 <value id="-1">CONVERGING 9265 <notes> 9266 The current result is not yet fully synchronized to any request. 9267 9268 Synchronization is in progress, and reading metadata from this 9269 result may include a mix of data that have taken effect since the 9270 last synchronization time. 9271 9272 In some future result, within android.sync.maxLatency frames, 9273 this value will update to the actual frame number 9274 the result is guaranteed to be synchronized to (as long as the 9275 request settings remain constant). 9276 </notes> 9277 </value> 9278 <value id="-2">UNKNOWN 9279 <notes> 9280 The current result's synchronization status is unknown. 9281 9282 The result may have already converged, or it may be in 9283 progress. Reading from this result may include some mix 9284 of settings from past requests. 9285 9286 After a settings change, the new settings will eventually all 9287 take effect for the output buffers and results. However, this 9288 value will not change when that happens. Altering settings 9289 rapidly may produce results using mixes of settings from recent 9290 requests. 9291 9292 This value is intended primarily for backwards compatibility with 9293 the older camera implementations (for android.hardware.Camera). 9294 </notes> 9295 </value> 9296 </enum> 9297 <description>The frame number corresponding to the last request 9298 with which the output result (metadata + buffers) has been fully 9299 synchronized.</description> 9300 <range>Either a non-negative value corresponding to a 9301 `frame_number`, or one of the two enums (CONVERGING / UNKNOWN). 9302 </range> 9303 <details> 9304 When a request is submitted to the camera device, there is usually a 9305 delay of several frames before the controls get applied. A camera 9306 device may either choose to account for this delay by implementing a 9307 pipeline and carefully submitting well-timed atomic control updates, or 9308 it may start streaming control changes that span over several frame 9309 boundaries. 9310 9311 In the latter case, whenever a request's settings change relative to 9312 the previously submitted request, the full set of changes may take 9313 multiple frame durations to fully take effect. Some settings may 9314 take effect sooner (in fewer frame durations) than others. 9315 9316 While a set of control changes is being propagated, this value 9317 will be CONVERGING. 9318 9319 Once a set of control changes has fully 9320 finished propagating, and the resulting updated control settings 9321 have been read back by the camera device, this value will be set 9322 to a non-negative frame number (corresponding to the request to 9323 which the results have synchronized). 9324 9325 Older camera device implementations may not have a way to detect 9326 when all camera controls have been applied, and will always set this 9327 value to UNKNOWN. 9328 9329 FULL capability devices will always have this value set to the 9330 frame number of the request corresponding to this result.
9331 9332 _Further details_: 9333 9334 * Whenever a request differs from the last request, any future 9335 results not yet returned may have this value set to CONVERGING (this 9336 could include any in-progress captures not yet returned by the camera 9337 device, for more details see pipeline considerations below). 9338 * Submitting a series of multiple requests that differ from the 9339 previous request (e.g. r1, r2, r3 s.t. r1 != r2 != r3) 9340 moves the new synchronization frame to the last non-repeating 9341 request (using the smallest frame number from the contiguous list of 9342 repeating requests). 9343 * Submitting the same request repeatedly will not change this value 9344 to CONVERGING, if it was already a non-negative value. 9345 * When this value changes to non-negative, that means that all of the 9346 metadata controls from the request have been applied, all of the 9347 metadata controls from the camera device have been read to the 9348 updated values (into the result), and all of the graphics buffers 9349 corresponding to this result are also synchronized to the request. 9350 9351 _Pipeline considerations_: 9352 9353 Submitting a request with updated controls relative to the previously 9354 submitted requests may also invalidate the synchronization state 9355 of all the results corresponding to currently in-flight requests. 9356 9357 In other words, results for this current request and up to 9358 android.request.pipelineMaxDepth prior requests may have their 9359 android.sync.frameNumber change to CONVERGING. 9360 </details> 9361 <hal_details> 9362 Using UNKNOWN here is illegal unless android.sync.maxLatency 9363 is also UNKNOWN. 9364 9365 FULL capability devices should simply set this value to the 9366 `frame_number` of the request this result corresponds to. 9367 </hal_details> 9368 <tag id="V1" /> 9369 </entry> 9370 </dynamic> 9371 <static> 9372 <entry name="maxLatency" type="int32" visibility="public" enum="true" 9373 hwlevel="legacy"> 9374 <enum> 9375 <value id="0">PER_FRAME_CONTROL 9376 <notes> 9377 Every frame has the requests immediately applied. 9378 9379 Changing controls over multiple requests one after another will 9380 produce results that have those controls applied atomically 9381 each frame. 9382 9383 All FULL capability devices will have this as their maxLatency. 9384 </notes> 9385 </value> 9386 <value id="-1">UNKNOWN 9387 <notes> 9388 Each new frame has some subset (potentially the entire set) 9389 of the past requests applied to the camera settings. 9390 9391 By submitting a series of identical requests, the camera device 9392 will eventually have the camera settings applied, but it is 9393 unknown when that exact point will be. 9394 9395 All LEGACY capability devices will have this as their maxLatency. 9396 </notes> 9397 </value> 9398 </enum> 9399 <description> 9400 The maximum number of frames that can occur after a request 9401 (different than the previous) has been submitted, and before the 9402 result's state becomes synchronized. 9403 </description> 9404 <units>Frame counts</units> 9405 <range>A positive value, PER_FRAME_CONTROL, or UNKNOWN.</range> 9406 <details> 9407 This defines the maximum distance (in number of metadata results), 9408 between the frame number of the request that has new controls to apply 9409 and the frame number of the result that has all the controls applied. 
9410 9411 In other words, this acts as an upper bound on how many frames 9412 must occur before the camera device knows for a fact that the newly 9413 submitted camera settings have been applied in outgoing frames. 9414 </details> 9415 <hal_details> 9416 For example, if maxLatency was 2, 9417 9418 initial request = X (repeating) 9419 request1 = X 9420 request2 = Y 9421 request3 = Y 9422 request4 = Y 9423 9424 where requestN has frameNumber N, and the first of the repeating 9425 initial requests has frameNumber F (and F < 1). 9426 9427 initial result = X' + { android.sync.frameNumber == F } 9428 result1 = X' + { android.sync.frameNumber == F } 9429 result2 = X' + { android.sync.frameNumber == CONVERGING } 9430 result3 = X' + { android.sync.frameNumber == CONVERGING } 9431 result4 = X' + { android.sync.frameNumber == 2 } 9432 9433 where resultN has frameNumber N. 9434 9435 Since `result4` has a `frameNumber == 4` and 9436 `android.sync.frameNumber == 2`, the distance is clearly 9437 `4 - 2 = 2`. 9438 9439 Use `frame_count` from camera3_request_t instead of 9440 android.request.frameCount or 9441 `{@link android.hardware.camera2.CaptureResult#getFrameNumber}`. 9442 9443 LIMITED devices are strongly encouraged to use a non-negative 9444 value. If UNKNOWN is used here, then app developers do not have a way 9445 to know when sensor settings have been applied. 9446 </hal_details> 9447 <tag id="V1" /> 9448 </entry> 9449 </static> 9450 </section> 9451 <section name="reprocess"> 9452 <controls> 9453 <entry name="effectiveExposureFactor" type="float" visibility="java_public" hwlevel="limited"> 9454 <description> 9455 The exposure time increase factor applied to the original output 9456 frame by the application's processing before sending it for reprocessing. 9457 </description> 9458 <units>Relative exposure time increase factor.</units> 9459 <range> &gt;= 1.0</range> 9460 <details> 9461 This is optional, and will be supported if the camera device supports the YUV_REPROCESSING 9462 capability (android.request.availableCapabilities contains YUV_REPROCESSING). 9463 9464 For some YUV reprocessing use cases, the application may choose to filter the original 9465 output frames to effectively reduce the noise to the same level as a frame that was 9466 captured with a longer exposure time. To be more specific, assuming the original captured 9467 images were captured with a sensitivity of S and an exposure time of T, the model in 9468 the camera device is that the amount of noise in the image would be approximately what 9469 would be expected if the original capture parameters had been a sensitivity of 9470 S/effectiveExposureFactor and an exposure time of T*effectiveExposureFactor, rather 9471 than S and T respectively. If the captured images were processed by the application 9472 before being sent for reprocessing, then the application may have used image processing 9473 algorithms and/or multi-frame image fusion to reduce the noise in the 9474 application-processed images (input images). By using the effectiveExposureFactor 9475 control, the application can communicate to the camera device the actual noise level 9476 improvement in the application-processed image. With this information, the camera 9477 device can select appropriate noise reduction and edge enhancement parameters to avoid 9478 excessive noise reduction (android.noiseReduction.mode) and insufficient edge 9479 enhancement (android.edge.mode) being applied to the reprocessed frames.
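As a rough sketch of how an application might report this factor (it assumes a reprocessable capture session `session`, its `cameraDevice`, the `TotalCaptureResult` of the original capture `originalResult`, an estimated factor `appliedExposureFactor`, an output surface `reprocessOutputSurface`, and a `handler`; queueing the processed input image to the session's input surface via an `ImageWriter`, and exception handling, are omitted):

    // Build a reprocess request from the original capture and report how much the
    // application's own processing improved the effective exposure.
    CaptureRequest.Builder reprocessBuilder =
            cameraDevice.createReprocessCaptureRequest(originalResult);
    reprocessBuilder.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR,
            appliedExposureFactor); // e.g. 2.0f after fusing 4 frames, see below
    reprocessBuilder.addTarget(reprocessOutputSurface);
    session.capture(reprocessBuilder.build(), null, handler);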
9480 9481 For example, in a multi-frame image fusion use case, the application may fuse 9482 multiple output frames together into a final frame for reprocessing. When N images are 9483 fused into 1 image for reprocessing, the exposure time increase factor could be up to 9484 the square root of N (based on a simple photon shot noise model). The camera device will 9485 adjust the reprocessing noise reduction and edge enhancement parameters accordingly to 9486 produce the best quality images. 9487 9488 This is a relative factor; 1.0 indicates that the application hasn't processed the input 9489 buffer in a way that affects its effective exposure time. 9490 9491 This control is only effective for YUV reprocessing capture requests. For noise 9492 reduction reprocessing, it is only effective when `android.noiseReduction.mode != OFF`. 9493 Similarly, for edge enhancement reprocessing, it is only effective when 9494 `android.edge.mode != OFF`. 9495 </details> 9496 <tag id="REPROC" /> 9497 </entry> 9498 </controls> 9499 <dynamic> 9500 <clone entry="android.reprocess.effectiveExposureFactor" kind="controls"> 9501 </clone> 9502 </dynamic> 9503 <static> 9504 <entry name="maxCaptureStall" type="int32" visibility="java_public" hwlevel="limited"> 9505 <description> 9506 The maximal camera capture pipeline stall (in units of frame count) introduced by a 9507 reprocess capture request. 9508 </description> 9509 <units>Number of frames.</units> 9510 <range> &lt;= 4</range> 9511 <details> 9512 This key describes the maximal interference that one reprocess (input) request 9513 can introduce to the camera device's simultaneous streaming of regular (output) capture 9514 requests, including repeating requests. 9515 9516 When a reprocessing capture request is submitted while a camera output repeating request 9517 (e.g. preview) is being served by the camera device, it may preempt the camera capture 9518 pipeline for at least one frame duration so that the camera device is unable to process 9519 the following capture request in time for the next sensor start of exposure boundary. 9520 When this happens, the application may observe a capture time gap (longer than one frame 9521 duration) between adjacent capture output frames, which usually shows up as a preview 9522 glitch if the repeating request output targets include a preview surface. This key gives 9523 the worst-case number of frames of stall introduced by one reprocess request with any 9524 format/size combination. 9525 9526 If this key reports 0, it means a reprocess request doesn't introduce any glitch to the 9527 ongoing camera repeating request outputs, as if the reprocess request were never issued. 9528 9529 This key is supported if the camera device supports PRIVATE or YUV reprocessing ( 9530 i.e. android.request.availableCapabilities contains PRIVATE_REPROCESSING or 9531 YUV_REPROCESSING). 9532 </details> 9533 <tag id="REPROC" /> 9534 </entry> 9535 </static> 9536 </section> 9537 <section name="depth"> 9538 <static> 9539 <entry name="maxDepthSamples" type="int32" visibility="system" hwlevel="limited"> 9540 <description>Maximum number of points that a depth point cloud may contain. 9541 </description> 9542 <details> 9543 If a camera device supports outputting depth range data in the form of a depth point 9544 cloud ({@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD}), this is the maximum 9545 number of points an output buffer may contain. 9546 9547 Any given buffer may contain between 0 and maxDepthSamples points, inclusive.
9548 If output in the depth point cloud format is not supported, this entry will 9549 not be defined. 9550 </details> 9551 <tag id="DEPTH" /> 9552 </entry> 9553 <entry name="availableDepthStreamConfigurations" type="int32" visibility="ndk_public" 9554 enum="true" container="array" typedef="streamConfiguration" hwlevel="limited"> 9555 <array> 9556 <size>n</size> 9557 <size>4</size> 9558 </array> 9559 <enum> 9560 <value>OUTPUT</value> 9561 <value>INPUT</value> 9562 </enum> 9563 <description>The available depth dataspace stream 9564 configurations that this camera device supports 9565 (i.e. format, width, height, output/input stream). 9566 </description> 9567 <details> 9568 These are output stream configurations for use with 9569 dataSpace HAL_DATASPACE_DEPTH. The configurations are 9570 listed as `(format, width, height, input?)` tuples. 9571 9572 Only devices that support depth output for at least 9573 the HAL_PIXEL_FORMAT_Y16 dense depth map may include 9574 this entry. 9575 9576 A device that also supports the HAL_PIXEL_FORMAT_BLOB 9577 sparse depth point cloud must report a single entry for 9578 the format in this list as `(HAL_PIXEL_FORMAT_BLOB, 9579 android.depth.maxDepthSamples, 1, OUTPUT)` in addition to 9580 the entries for HAL_PIXEL_FORMAT_Y16. 9581 </details> 9582 <tag id="DEPTH" /> 9583 </entry> 9584 <entry name="availableDepthMinFrameDurations" type="int64" visibility="ndk_public" 9585 container="array" typedef="streamConfigurationDuration" hwlevel="limited"> 9586 <array> 9587 <size>4</size> 9588 <size>n</size> 9589 </array> 9590 <description>This lists the minimum frame duration for each 9591 format/size combination for depth output formats. 9592 </description> 9593 <units>(format, width, height, ns) x n</units> 9594 <details> 9595 This should correspond to the frame duration when only that 9596 stream is active, with all processing (typically in android.*.mode) 9597 set to either OFF or FAST. 9598 9599 When multiple streams are used in a request, the minimum frame 9600 duration will be max(individual stream min durations). 9601 9602 The minimum frame duration of a stream (of a particular format, size) 9603 is the same regardless of whether the stream is input or output. 9604 9605 See android.sensor.frameDuration and 9606 android.scaler.availableStallDurations for more details about 9607 calculating the max frame rate. 9608 </details> 9609 <tag id="DEPTH" /> 9610 </entry> 9611 <entry name="availableDepthStallDurations" type="int64" visibility="ndk_public" 9612 container="array" typedef="streamConfigurationDuration" hwlevel="limited"> 9613 <array> 9614 <size>4</size> 9615 <size>n</size> 9616 </array> 9617 <description>This lists the maximum stall duration for each 9618 output format/size combination for depth streams. 9619 </description> 9620 <units>(format, width, height, ns) x n</units> 9621 <details> 9622 A stall duration is how much extra time would get added 9623 to the normal minimum frame duration for a repeating request 9624 that has streams with non-zero stall. 9625 9626 This functions similarly to 9627 android.scaler.availableStallDurations for depth 9628 streams. 9629 9630 All depth output stream formats may have a nonzero stall 9631 duration. 
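At the Java framework level these configurations are not read directly; they are exposed through the stream configuration map. A minimal sketch (it assumes `characteristics` belongs to a device that supports DEPTH16 output):

    StreamConfigurationMap map = characteristics.get(
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    for (Size size : map.getOutputSizes(ImageFormat.DEPTH16)) {
        long minFrameNs = map.getOutputMinFrameDuration(ImageFormat.DEPTH16, size);
        long stallNs = map.getOutputStallDuration(ImageFormat.DEPTH16, size);
        // A repeating request that keeps this depth stream busy every frame is
        // roughly limited to 1e9 / (minFrameNs + stallNs) frames per second.
    }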
9632 </details> 9633 <tag id="DEPTH" /> 9634 </entry> 9635 <entry name="depthIsExclusive" type="byte" visibility="public" 9636 enum="true" typedef="boolean" hwlevel="limited"> 9637 <enum> 9638 <value>FALSE</value> 9639 <value>TRUE</value> 9640 </enum> 9641 <description>Indicates whether a capture request may target both a 9642 DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as 9643 YUV_420_888, JPEG, or RAW) simultaneously. 9644 </description> 9645 <details> 9646 If TRUE, including both depth and color outputs in a single 9647 capture request is not supported. An application must interleave color 9648 and depth requests. If FALSE, a single request can target both types 9649 of output. 9650 9651 Typically, this restriction exists on camera devices that 9652 need to emit a specific pattern or wavelength of light to 9653 measure depth values, which causes the color image to be 9654 corrupted during depth measurement. 9655 </details> 9656 </entry> 9657 </static> 9658 </section> 9659 <section name="logicalMultiCamera"> 9660 <static> 9661 <entry name="physicalIds" type="byte" visibility="hidden" 9662 container="array" hwlevel="limited" hal_version="3.3"> 9663 <array> 9664 <size>n</size> 9665 </array> 9666 <description>String containing the ids of the underlying physical cameras. 9667 </description> 9668 <units>UTF-8 null-terminated string</units> 9669 <details> 9670 For a logical camera, this is the concatenation of all underlying physical camera ids. 9671 The null terminator for each physical camera id must be preserved so that the whole string 9672 can be tokenized using '\0' to generate the list of physical camera ids. 9673 9674 For example, if the physical camera ids of the logical camera are "2" and "3", the 9675 value of this tag will be ['2', '\0', '3', '\0']. 9676 9677 The number of physical camera ids must be no less than 2. 9678 </details> 9679 <tag id="LOGICALCAMERA" /> 9680 </entry> 9681 <entry name="sensorSyncType" type="byte" visibility="public" 9682 enum="true" hwlevel="limited" hal_version="3.3"> 9683 <enum> 9684 <value>APPROXIMATE 9685 <notes> 9686 A software mechanism is used to synchronize between the physical cameras. As a result, 9687 the timestamp of an image from a physical stream is only an approximation of the 9688 image sensor start-of-exposure time. 9689 </notes> 9690 </value> 9691 <value>CALIBRATED 9692 <notes> 9693 The camera device supports frame timestamp synchronization at the hardware level, 9694 and the timestamp of a physical stream image accurately reflects its 9695 start-of-exposure time. 9696 </notes> 9697 </value> 9698 </enum> 9699 <description>The accuracy of frame timestamp synchronization between physical cameras</description> 9700 <details> 9701 The accuracy of the frame timestamp synchronization determines the physical cameras' 9702 ability to start exposure at the same time. If the sensorSyncType is CALIBRATED, 9703 the physical camera sensors usually run in master-slave mode so that their shutter 9704 time is synchronized. For APPROXIMATE sensorSyncType, the camera sensors usually run in 9705 master-master mode, and there could be an offset between their start of exposure times. 9706 9707 In both cases, all images generated for a particular capture request still carry the same 9708 timestamps, so that they can be used to look up the matching frame number and 9709 onCaptureStarted callback.
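In the Java API, the same information can be read from the camera characteristics; a minimal sketch (`characteristics` is assumed to belong to a logical multi-camera):

    Set<String> physicalIds = characteristics.getPhysicalCameraIds();
    Integer syncType = characteristics.get(
            CameraCharacteristics.LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE);
    boolean calibrated = syncType != null
            && syncType == CameraMetadata.LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_CALIBRATED;
    // With CALIBRATED sync the per-physical-stream timestamps reflect actual
    // start of exposure; with APPROXIMATE sync treat them as best-effort estimates.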
9710 </details> 9711 <tag id="LOGICALCAMERA" /> 9712 </entry> 9713 </static> 9714 </section> 9715 <section name="distortionCorrection"> 9716 <controls> 9717 <entry name="mode" type="byte" visibility="public" enum="true" hal_version="3.3"> 9718 <enum> 9719 <value>OFF 9720 <notes>No distortion correction is applied.</notes></value> 9721 <value>FAST <notes>Lens distortion correction is applied without reducing frame rate 9722 relative to sensor output. It may be the same as OFF if distortion correction would 9723 reduce frame rate relative to sensor.</notes></value> 9724 <value>HIGH_QUALITY <notes>High-quality distortion correction is applied, at the cost of 9725 possibly reduced frame rate relative to sensor output.</notes></value> 9726 </enum> 9727 <description>Mode of operation for the lens distortion correction block.</description> 9728 <range>android.distortionCorrection.availableModes</range> 9729 <details>The lens distortion correction block attempts to improve image quality by fixing 9730 radial, tangential, or other geometric aberrations in the camera device's optics. If 9731 available, the android.lens.distortion field documents the lens's distortion parameters. 9732 9733 OFF means no distortion correction is done. 9734 9735 FAST/HIGH_QUALITY both mean camera device determined distortion correction will be 9736 applied. HIGH_QUALITY mode indicates that the camera device will use the highest-quality 9737 correction algorithms, even if it slows down capture rate. FAST means the camera device 9738 will not slow down capture rate when applying correction. FAST may be the same as OFF if 9739 any correction at all would slow down capture rate. Every output stream will have a 9740 similar amount of enhancement applied. 9741 9742 The correction only applies to processed outputs such as YUV, JPEG, or DEPTH16; it is not 9743 applied to any RAW output. Metadata coordinates such as face rectangles or metering 9744 regions are also not affected by correction. 9745 9746 Applications enabling distortion correction need to pay extra attention when converting 9747 image coordinates between corrected output buffers and the sensor array. For example, if 9748 the app supports tap-to-focus and enables correction, it then has to apply the distortion 9749 model described in android.lens.distortion to the image buffer tap coordinates to properly 9750 calculate the tap position on the sensor active array to be used with 9751 android.control.afRegions. The same applies in reverse to detected face rectangles if 9752 they need to be drawn on top of the corrected output buffers. 9753 </details> 9754 </entry> 9755 </controls> 9756 <static> 9757 <entry name="availableModes" type="byte" visibility="public" 9758 type_notes="list of enums" container="array" typedef="enumList" hal_version="3.3"> 9759 <array> 9760 <size>n</size> 9761 </array> 9762 <description> 9763 List of distortion correction modes for android.distortionCorrection.mode that are 9764 supported by this camera device. 9765 </description> 9766 <range>Any value listed in android.distortionCorrection.mode</range> 9767 <details> 9768 No device is required to support this API; such devices will always list only 'OFF'. 9769 All devices that support this API will list both FAST and HIGH_QUALITY. 9770 </details> 9771 <hal_details> 9772 HAL must support both FAST and HIGH_QUALITY if distortion correction is available 9773 on the camera device, but the underlying implementation can be the same for both modes. 
9774 That is, if the highest quality implementation on the camera device does not slow down 9775 capture rate, then FAST and HIGH_QUALITY will generate the same output. 9776 </hal_details> 9777 <tag id="V1" /> 9778 <tag id="REPROC" /> 9779 </entry> 9780 </static> 9781 <dynamic> 9782 <clone entry="android.distortionCorrection.mode" kind="controls" hal_version="3.3"> 9783 </clone> 9784 </dynamic> 9785 </section> 9786 </namespace> 9787</metadata> 9788