1<?xml version="1.0" encoding="utf-8"?>
2<!-- Copyright (C) 2016 The Android Open Source Project
3
4     Licensed under the Apache License, Version 2.0 (the "License");
5     you may not use this file except in compliance with the License.
6     You may obtain a copy of the License at
7
8          http://www.apache.org/licenses/LICENSE-2.0
9
10     Unless required by applicable law or agreed to in writing, software
11     distributed under the License is distributed on an "AS IS" BASIS,
12     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13     See the License for the specific language governing permissions and
14     limitations under the License.
15-->
16<metadata xmlns="http://schemas.android.com/service/camera/metadata/"
17xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
18xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata_properties.xsd">
19
20  <tags>
21    <tag id="BC">
22        Needed for backwards compatibility with old Java API
23    </tag>
24    <tag id="V1">
25        New features for first camera 2 release (API1)
26    </tag>
27    <tag id="RAW">
28        Needed for useful RAW image processing and DNG file support
29    </tag>
30    <tag id="HAL2">
31        Entry is only used by camera device HAL 2.x
32    </tag>
33    <tag id="FULL">
34        Entry is required for full hardware level devices, and optional for other hardware levels
35    </tag>
36    <tag id="DEPTH">
37        Entry is required for the depth capability.
38    </tag>
39    <tag id="REPROC">
40        Entry is required for the YUV or PRIVATE reprocessing capability.
41    </tag>
42    <tag id="FUTURE">
43        Entry is under-specified and is not required for now. This is for book-keeping purposes;
44        do not implement or use it, as it may be revised in the future.
45    </tag>
46  </tags>
47
48  <types>
49    <typedef name="pairFloatFloat">
50      <language name="java">android.util.Pair&lt;Float,Float&gt;</language>
51    </typedef>
52    <typedef name="pairDoubleDouble">
53      <language name="java">android.util.Pair&lt;Double,Double&gt;</language>
54    </typedef>
55    <typedef name="rectangle">
56      <language name="java">android.graphics.Rect</language>
57    </typedef>
58    <typedef name="size">
59      <language name="java">android.util.Size</language>
60    </typedef>
61    <typedef name="string">
62      <language name="java">String</language>
63    </typedef>
64    <typedef name="boolean">
65      <language name="java">boolean</language>
66    </typedef>
67    <typedef name="imageFormat">
68      <language name="java">int</language>
69    </typedef>
70    <typedef name="streamConfigurationMap">
71      <language name="java">android.hardware.camera2.params.StreamConfigurationMap</language>
72    </typedef>
73    <typedef name="streamConfiguration">
74      <language name="java">android.hardware.camera2.params.StreamConfiguration</language>
75    </typedef>
76    <typedef name="streamConfigurationDuration">
77      <language name="java">android.hardware.camera2.params.StreamConfigurationDuration</language>
78    </typedef>
79    <typedef name="face">
80      <language name="java">android.hardware.camera2.params.Face</language>
81    </typedef>
82    <typedef name="meteringRectangle">
83      <language name="java">android.hardware.camera2.params.MeteringRectangle</language>
84    </typedef>
85    <typedef name="rangeFloat">
86      <language name="java">android.util.Range&lt;Float&gt;</language>
87    </typedef>
88    <typedef name="rangeInt">
89      <language name="java">android.util.Range&lt;Integer&gt;</language>
90    </typedef>
91    <typedef name="rangeLong">
92      <language name="java">android.util.Range&lt;Long&gt;</language>
93    </typedef>
94    <typedef name="colorSpaceTransform">
95      <language name="java">android.hardware.camera2.params.ColorSpaceTransform</language>
96    </typedef>
97    <typedef name="rggbChannelVector">
98      <language name="java">android.hardware.camera2.params.RggbChannelVector</language>
99    </typedef>
100    <typedef name="blackLevelPattern">
101      <language name="java">android.hardware.camera2.params.BlackLevelPattern</language>
102    </typedef>
103    <typedef name="enumList">
104      <language name="java">int</language>
105    </typedef>
106    <typedef name="sizeF">
107      <language name="java">android.util.SizeF</language>
108    </typedef>
109    <typedef name="point">
110      <language name="java">android.graphics.Point</language>
111    </typedef>
112    <typedef name="tonemapCurve">
113      <language name="java">android.hardware.camera2.params.TonemapCurve</language>
114    </typedef>
115    <typedef name="lensShadingMap">
116      <language name="java">android.hardware.camera2.params.LensShadingMap</language>
117    </typedef>
118    <typedef name="location">
119      <language name="java">android.location.Location</language>
120    </typedef>
121    <typedef name="highSpeedVideoConfiguration">
122      <language name="java">android.hardware.camera2.params.HighSpeedVideoConfiguration</language>
123    </typedef>
124    <typedef name="reprocessFormatsMap">
125      <language name="java">android.hardware.camera2.params.ReprocessFormatsMap</language>
126    </typedef>
127  </types>
128
129  <namespace name="android">
130    <section name="colorCorrection">
131      <controls>
132        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
133          <enum>
134            <value>TRANSFORM_MATRIX
135              <notes>Use the android.colorCorrection.transform matrix
136                and android.colorCorrection.gains to do color conversion.
137
138                All advanced white balance adjustments (not specified
139                by our white balance pipeline) must be disabled.
140
141                If AWB is enabled with `android.control.awbMode != OFF`, then
142                TRANSFORM_MATRIX is ignored. The camera device will override
143                this value to either FAST or HIGH_QUALITY.
144              </notes>
145            </value>
146            <value>FAST
147              <notes>Color correction processing must not slow down
148              capture rate relative to sensor raw output.
149
150              Advanced white balance adjustments above and beyond
151              the specified white balance pipeline may be applied.
152
153              If AWB is enabled with `android.control.awbMode != OFF`, then
154              the camera device uses the last frame's AWB values
155              (or defaults if AWB has never been run).
156            </notes>
157            </value>
158            <value>HIGH_QUALITY
159              <notes>Color correction processing operates at improved
160              quality but the capture rate might be reduced (relative to sensor
161              raw output rate).
162
163              Advanced white balance adjustments above and beyond
164              the specified white balance pipeline may be applied.
165
166              If AWB is enabled with `android.control.awbMode != OFF`, then
167              the camera device uses the last frame's AWB values
168              (or defaults if AWB has never been run).
169            </notes>
170            </value>
171          </enum>
172
173          <description>
174          The mode control selects how the image data is converted from the
175          sensor's native color into linear sRGB color.
176          </description>
177          <details>
178          When auto-white balance (AWB) is enabled with android.control.awbMode, this
179          control is overridden by the AWB routine. When AWB is disabled, the
180          application controls how the color mapping is performed.
181
182          We define the expected processing pipeline below. For consistency
183          across devices, this is always the case with TRANSFORM_MATRIX.
184
185          When either FAST or HIGH_QUALITY is used, the camera device may
186          do additional processing but android.colorCorrection.gains and
187          android.colorCorrection.transform will still be provided by the
188          camera device (in the results) and be roughly correct.
189
190          Switching to TRANSFORM_MATRIX and using the data provided from
191          FAST or HIGH_QUALITY will yield a picture with the same white point
192          as what was produced by the camera device in the earlier frame.
193
194          The expected processing pipeline is as follows:
195
196          ![White balance processing pipeline](android.colorCorrection.mode/processing_pipeline.png)
197
198          The white balance is encoded by two values, a 4-channel white-balance
199          gain vector (applied in the Bayer domain), and a 3x3 color transform
200          matrix (applied after demosaic).
201
202          The 4-channel white-balance gains are defined as:
203
204              android.colorCorrection.gains = [ R G_even G_odd B ]
205
206          where `G_even` is the gain for green pixels on even rows of the
207          output, and `G_odd` is the gain for green pixels on the odd rows.
208          These may be identical for a given camera device implementation; if
209          the camera device does not support a separate gain for even/odd green
210          channels, it will use the `G_even` value, and write `G_odd` equal to
211          `G_even` in the output result metadata.
212
213          The matrices for color transforms are defined as a 9-entry vector:
214
215              android.colorCorrection.transform = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
216
217          which define a transform from input sensor colors, `P_in = [ r g b ]`,
218          to output linear sRGB, `P_out = [ r' g' b' ]`,
219
220          with colors as follows:
221
222              r' = I0r + I1g + I2b
223              g' = I3r + I4g + I5b
224              b' = I6r + I7g + I8b
225
226          Both the input and output value ranges must match. Overflow/underflow
227          values are clipped to fit within the range.
228          </details>
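          <!-- A minimal Java sketch of the manual path described above, assuming an
               application-provided CaptureRequest.Builder `builder` (classes from
               android.hardware.camera2 and android.util); the gain and transform values
               are illustrative only:

                 // AWB must be OFF, otherwise TRANSFORM_MATRIX is overridden to FAST or HIGH_QUALITY.
                 builder.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_OFF);
                 builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
                         CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
                 // [ R G_even G_odd B ] white-balance gains, applied in the Bayer domain.
                 builder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
                         new RggbChannelVector(2.0f, 1.0f, 1.0f, 1.8f));
                 // Identity 3x3 sensor-RGB-to-linear-sRGB transform, in row-major order.
                 Rational one = new Rational(1, 1), zero = new Rational(0, 1);
                 builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
                         new ColorSpaceTransform(new Rational[] {
                                 one, zero, zero,
                                 zero, one, zero,
                                 zero, zero, one }));
          -->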
229          <hal_details>
230          HAL must support both FAST and HIGH_QUALITY if color correction control is available
231          on the camera device, but the underlying implementation can be the same for both modes.
232          That is, if the highest quality implementation on the camera device does not slow down
233          capture rate, then FAST and HIGH_QUALITY should generate the same output.
234          </hal_details>
235        </entry>
236        <entry name="transform" type="rational" visibility="public"
237               type_notes="3x3 rational matrix in row-major order"
238               container="array" typedef="colorSpaceTransform" hwlevel="full">
239          <array>
240            <size>3</size>
241            <size>3</size>
242          </array>
243          <description>A color transform matrix to use to transform
244          from sensor RGB color space to output linear sRGB color space.
245          </description>
246          <units>Unitless scale factors</units>
247          <details>This matrix is either set by the camera device when the request
248          android.colorCorrection.mode is not TRANSFORM_MATRIX, or
249          directly by the application in the request when the
250          android.colorCorrection.mode is TRANSFORM_MATRIX.
251
252          In the latter case, the camera device may round the matrix to account
253          for precision issues; the final rounded matrix should be reported back
254          in this matrix result metadata. The transform should keep the magnitude
255          of the output color values within `[0, 1.0]` (assuming input color
256          values are within the normalized range `[0, 1.0]`), or clipping may occur.
257
258          The valid range of each matrix element varies on different devices, but
259          values within [-1.5, 3.0] are guaranteed not to be clipped.
260          </details>
261        </entry>
262        <entry name="gains" type="float" visibility="public"
263               type_notes="A 1D array of floats for 4 color channel gains"
264               container="array" typedef="rggbChannelVector" hwlevel="full">
265          <array>
266            <size>4</size>
267          </array>
268          <description>Gains applying to Bayer raw color channels for
269          white-balance.</description>
270          <units>Unitless gain factors</units>
271          <details>
272          These per-channel gains are either set by the camera device
273          when the request android.colorCorrection.mode is not
274          TRANSFORM_MATRIX, or directly by the application in the
275          request when the android.colorCorrection.mode is
276          TRANSFORM_MATRIX.
277
278          The gains in the result metadata are the gains actually
279          applied by the camera device to the current frame.
280
281          The valid range of gains varies on different devices, but gains
282          between [1.0, 3.0] are guaranteed not to be clipped. Even if a given
283          device allows gains below 1.0, this is usually not recommended, because
284          doing so can create color artifacts.
285          </details>
286          <hal_details>
287          The 4-channel white-balance gains are defined in
288          the order of `[R G_even G_odd B]`, where `G_even` is the gain
289          for green pixels on even rows of the output, and `G_odd`
290          is the gain for green pixels on the odd rows.
291
292          If a HAL does not support a separate gain for even/odd green
293          channels, it must use the `G_even` value, and write
294          `G_odd` equal to `G_even` in the output result metadata.
295          </hal_details>
296        </entry>
297        <entry name="aberrationMode" type="byte" visibility="public" enum="true" hwlevel="legacy">
298          <enum>
299            <value>OFF
300              <notes>
301                No aberration correction is applied.
302              </notes>
303            </value>
304            <value>FAST
305              <notes>
306                Aberration correction will not slow down capture rate
307                relative to sensor raw output.
308            </notes>
309            </value>
310            <value>HIGH_QUALITY
311              <notes>
312                Aberration correction operates at improved quality but the capture rate might be
313                reduced (relative to sensor raw output rate).
314            </notes>
315            </value>
316          </enum>
317          <description>
318            Mode of operation for the chromatic aberration correction algorithm.
319          </description>
320          <range>android.colorCorrection.availableAberrationModes</range>
321          <details>
322            Chromatic (color) aberration is caused by the fact that different wavelengths of light
323            cannot focus on the same point after exiting the lens. This metadata defines
324            the high-level control of the chromatic aberration correction algorithm, which aims to
325            minimize the chromatic artifacts that may occur along the object boundaries in an
326            image.
327
328            FAST/HIGH_QUALITY both mean that camera-device-determined aberration
329            correction will be applied. HIGH_QUALITY mode indicates that the camera device will
330            use the highest-quality aberration correction algorithms, even if it slows down
331            capture rate. FAST means the camera device will not slow down capture rate when
332            applying aberration correction.
333
334            LEGACY devices will always be in FAST mode.
335          </details>
336        </entry>
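        <!-- As an illustration of the mode selection described above, a short Java sketch
             assuming a CameraCharacteristics `characteristics` and a CaptureRequest.Builder
             `builder`; it prefers HIGH_QUALITY whenever it is listed in
             android.colorCorrection.availableAberrationModes:

               int[] modes = characteristics.get(
                       CameraCharacteristics.COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES);
               int chosen = modes[0];  // the list always contains at least OFF or FAST
               for (int m : modes) {
                   if (m == CameraMetadata.COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
                       chosen = m;  // accept a possibly reduced capture rate for better quality
                   }
               }
               builder.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, chosen);
        -->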
337      </controls>
338      <dynamic>
339        <clone entry="android.colorCorrection.mode" kind="controls">
340        </clone>
341        <clone entry="android.colorCorrection.transform" kind="controls">
342        </clone>
343        <clone entry="android.colorCorrection.gains" kind="controls">
344        </clone>
345        <clone entry="android.colorCorrection.aberrationMode" kind="controls">
346        </clone>
347      </dynamic>
348      <static>
349        <entry name="availableAberrationModes" type="byte" visibility="public"
350        type_notes="list of enums" container="array" typedef="enumList" hwlevel="legacy">
351          <array>
352            <size>n</size>
353          </array>
354          <description>
355            List of aberration correction modes for android.colorCorrection.aberrationMode that are
356            supported by this camera device.
357          </description>
358          <range>Any value listed in android.colorCorrection.aberrationMode</range>
359          <details>
360            This key lists the valid modes for android.colorCorrection.aberrationMode.  If no
361            aberration correction modes are available for a device, this list will solely include
362            OFF mode. All camera devices will support either OFF or FAST mode.
363
364            Camera devices that support the MANUAL_POST_PROCESSING capability will always list
365            OFF mode. This includes all FULL level devices.
366
367            LEGACY devices will always only support FAST mode.
368          </details>
369          <hal_details>
370            HAL must support both FAST and HIGH_QUALITY if chromatic aberration control is available
371            on the camera device, but the underlying implementation can be the same for both modes.
372            That is, if the highest quality implementation on the camera device does not slow down
373            capture rate, then FAST and HIGH_QUALITY will generate the same output.
374          </hal_details>
375          <tag id="V1" />
376        </entry>
377      </static>
378    </section>
379    <section name="control">
380      <controls>
381        <entry name="aeAntibandingMode" type="byte" visibility="public"
382               enum="true" hwlevel="legacy">
383          <enum>
384            <value>OFF
385              <notes>
386                The camera device will not adjust exposure duration to
387                avoid banding problems.
388              </notes>
389            </value>
390            <value>50HZ
391              <notes>
392                The camera device will adjust exposure duration to
393                avoid banding problems with 50Hz illumination sources.
394              </notes>
395            </value>
396            <value>60HZ
397              <notes>
398                The camera device will adjust exposure duration to
399                avoid banding problems with 60Hz illumination
400                sources.
401              </notes>
402            </value>
403            <value>AUTO
404              <notes>
405                The camera device will automatically adapt its
406                antibanding routine to the current illumination
407                condition. This is the default mode if AUTO is
408                available on the given camera device.
409              </notes>
410            </value>
411          </enum>
412          <description>
413            The desired setting for the camera device's auto-exposure
414            algorithm's antibanding compensation.
415          </description>
416          <range>
417            android.control.aeAvailableAntibandingModes
418          </range>
419          <details>
420            Some kinds of lighting fixtures, such as some fluorescent
421            lights, flicker at the rate of the power supply frequency
422            (60Hz or 50Hz, depending on country). While this is
423            typically not noticeable to a person, it can be visible to
424            a camera device. If a camera sets its exposure time to the
425            wrong value, the flicker may become visible in the
426            viewfinder, or may appear in the final captured image as a
427            set of variable-brightness bands across the image.
428
429            Therefore, the auto-exposure routines of camera devices
430            include antibanding routines that ensure that the chosen
431            exposure value will not cause such banding. The choice of
432            exposure time depends on the rate of flicker, which the
433            camera device can detect automatically, or the expected
434            rate can be selected by the application using this
435            control.
436
437            A given camera device may not support all of the possible
438            options for the antibanding mode. The
439            android.control.aeAvailableAntibandingModes key contains
440            the available modes for a given camera device.
441
442            AUTO mode is the default if it is available on the given
443            camera device. When AUTO mode is not available, the
444            default will be either 50HZ or 60HZ, and both 50HZ
445            and 60HZ will be available.
446
447            If manual exposure control is enabled (by setting
448            android.control.aeMode or android.control.mode to OFF),
449            then this setting has no effect, and the application must
450            ensure it selects exposure times that do not cause banding
451            issues. The android.statistics.sceneFlicker key can assist
452            the application in this.
453          </details>
454          <hal_details>
455            For all capture request templates, this field must be set
456            to AUTO if AUTO mode is available. If AUTO is not available,
457            the default must be either 50HZ or 60HZ, and both 50HZ and
458            60HZ must be available.
459
460            If manual exposure control is enabled (by setting
461            android.control.aeMode or android.control.mode to OFF),
462            then the exposure values provided by the application must not be
463            adjusted for antibanding.
464          </hal_details>
465          <tag id="BC" />
466        </entry>
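        <!-- A brief Java sketch of the selection logic described above (assuming
             `characteristics` and `builder` as elsewhere): prefer AUTO, otherwise fall back
             to one of the mains-frequency modes the device is guaranteed to list:

               int[] modes = characteristics.get(
                       CameraCharacteristics.CONTROL_AE_AVAILABLE_ANTIBANDING_MODES);
               int antibanding = CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_60HZ;  // or 50HZ, by region
               for (int m : modes) {
                   if (m == CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_AUTO) {
                       antibanding = m;  // AUTO is the default whenever it is available
                       break;
                   }
               }
               builder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, antibanding);
        -->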
467        <entry name="aeExposureCompensation" type="int32" visibility="public" hwlevel="legacy">
468          <description>Adjustment to auto-exposure (AE) target image
469          brightness.</description>
470          <units>Compensation steps</units>
471          <range>android.control.aeCompensationRange</range>
472          <details>
473          The adjustment is measured as a count of steps, with the
474          step size defined by android.control.aeCompensationStep and the
475          allowed range by android.control.aeCompensationRange.
476
477          For example, if the exposure value (EV) step is 0.333, '6'
478          will mean an exposure compensation of +2 EV; -3 will mean an
479          exposure compensation of -1 EV. One EV represents a doubling
480          of image brightness. Note that this control will only be
481          effective if android.control.aeMode `!=` OFF. This control
482          will take effect even when android.control.aeLock `== true`.
483
484          When the exposure compensation value is changed, the camera device
485          may take several frames to reach the newly requested exposure target.
486          During that time, the android.control.aeState field will be in the SEARCHING
487          state. Once the new exposure target is reached, android.control.aeState will
488          change from SEARCHING to either CONVERGED, LOCKED (if AE lock is enabled), or
489          FLASH_REQUIRED (if the scene is too dark for still capture).
490          </details>
491          <tag id="BC" />
492        </entry>
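        <!-- To make the step arithmetic above concrete, a small Java sketch (assuming
             `characteristics` and `builder`) that requests roughly +1 EV of compensation:

               Range<Integer> range = characteristics.get(
                       CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
               Rational step = characteristics.get(
                       CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
               // Number of steps corresponding to +1 EV, clamped to the supported range.
               int steps = Math.round(1.0f / step.floatValue());
               builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, range.clamp(steps));
        -->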
493        <entry name="aeLock" type="byte" visibility="public" enum="true"
494               typedef="boolean" hwlevel="legacy">
495          <enum>
496            <value>OFF
497            <notes>Auto-exposure lock is disabled; the AE algorithm
498            is free to update its parameters.</notes></value>
499            <value>ON
500            <notes>Auto-exposure lock is enabled; the AE algorithm
501            must not update the exposure and sensitivity parameters
502            while the lock is active.
503
504            android.control.aeExposureCompensation setting changes
505            will still take effect while auto-exposure is locked.
506
507            Some rare LEGACY devices may not support
508            this, in which case the value will always be overridden to OFF.
509            </notes></value>
510          </enum>
511          <description>Whether auto-exposure (AE) is currently locked to its latest
512          calculated values.</description>
513          <details>
514          When set to `true` (ON), the AE algorithm is locked to its latest parameters,
515          and will not change exposure settings until the lock is set to `false` (OFF).
516
517          Note that even when AE is locked, the flash may be fired if
518          the android.control.aeMode is ON_AUTO_FLASH /
519          ON_ALWAYS_FLASH / ON_AUTO_FLASH_REDEYE.
520
521          When android.control.aeExposureCompensation is changed, even if the AE lock
522          is ON, the camera device will still adjust its exposure value.
523
524          If AE precapture is triggered (see android.control.aePrecaptureTrigger)
525          when AE is already locked, the camera device will not change the exposure time
526          (android.sensor.exposureTime) and sensitivity (android.sensor.sensitivity)
527          parameters. The flash may be fired if the android.control.aeMode
528          is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the
529          android.control.aeMode is ON_ALWAYS_FLASH, the scene may become overexposed.
530          Similarly, AE precapture trigger CANCEL has no effect when AE is already locked.
531
532          When an AE precapture sequence is triggered, AE unlock will not be able to unlock
533          the AE if AE is locked by the camera device internally during the precapture metering
534          sequence. In other words, submitting requests with AE unlock has no effect on an
535          ongoing precapture metering sequence. Otherwise, the precapture metering sequence
536          will never succeed in a sequence of preview requests where AE lock is always set
537          to `false`.
538
539          Since the camera device has a pipeline of in-flight requests, the settings that
540          get locked do not necessarily correspond to the settings that were present in the
541          latest capture result received from the camera device, since additional captures
542          and AE updates may have occurred even before the result was sent out. If an
543          application is switching between automatic and manual control and wishes to eliminate
544          any flicker during the switch, the following procedure is recommended:
545
546            1. Starting in auto-AE mode:
547            2. Lock AE
548            3. Wait for the first result to be output that has the AE locked
549            4. Copy exposure settings from that result into a request, set the request to manual AE
550            5. Submit the capture request, proceed to run manual AE as desired.
551
552          See android.control.aeState for AE lock related state transition details.
553          </details>
554          <tag id="BC" />
555        </entry>
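        <!-- A hedged Java sketch of the five-step auto-to-manual handoff listed above.
             The session, builder, and handler are assumed to be application-provided, and a
             real application would run the copy step only once:

               void lockAeThenGoManual(final CameraCaptureSession session,
                       final CaptureRequest.Builder builder, Handler handler)
                       throws CameraAccessException {
                   builder.set(CaptureRequest.CONTROL_AE_LOCK, true);  // steps 1 and 2: lock AE
                   session.setRepeatingRequest(builder.build(), new CameraCaptureSession.CaptureCallback() {
                       @Override public void onCaptureCompleted(CameraCaptureSession s,
                               CaptureRequest req, TotalCaptureResult result) {
                           Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                           if (aeState == null || aeState != CameraMetadata.CONTROL_AE_STATE_LOCKED) return;
                           // Steps 3 and 4: first locked result; copy its exposure into a manual request.
                           builder.set(CaptureRequest.CONTROL_AE_LOCK, false);
                           builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
                           builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME,
                                   result.get(CaptureResult.SENSOR_EXPOSURE_TIME));
                           builder.set(CaptureRequest.SENSOR_SENSITIVITY,
                                   result.get(CaptureResult.SENSOR_SENSITIVITY));
                           try {
                               s.setRepeatingRequest(builder.build(), null, null);  // step 5: manual AE
                           } catch (CameraAccessException e) { /* handle the error */ }
                       }
                   }, handler);
               }
        -->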
556        <entry name="aeMode" type="byte" visibility="public" enum="true" hwlevel="legacy">
557          <enum>
558            <value>OFF
559              <notes>
560                The camera device's autoexposure routine is disabled.
561
562                The application-selected android.sensor.exposureTime,
563                android.sensor.sensitivity and
564                android.sensor.frameDuration are used by the camera
565                device, along with android.flash.* fields, if there's
566                a flash unit for this camera device.
567
568                Note that auto-white balance (AWB) and auto-focus (AF)
569                behavior is device dependent when AE is in OFF mode.
570                To have consistent behavior across different devices,
571                it is recommended to either set AWB and AF to OFF mode
572                or lock AWB and AF before setting AE to OFF.
573                See android.control.awbMode, android.control.afMode,
574                android.control.awbLock, and android.control.afTrigger
575                for more details.
576
577                LEGACY devices do not support the OFF mode and will
578                override attempts to use this value to ON.
579              </notes>
580            </value>
581            <value>ON
582              <notes>
583                The camera device's autoexposure routine is active,
584                with no flash control.
585
586                The application's values for
587                android.sensor.exposureTime,
588                android.sensor.sensitivity, and
589                android.sensor.frameDuration are ignored. The
590                application has control over the various
591                android.flash.* fields.
592              </notes>
593            </value>
594            <value>ON_AUTO_FLASH
595              <notes>
596                Like ON, except that the camera device also controls
597                the camera's flash unit, firing it in low-light
598                conditions.
599
600                The flash may be fired during a precapture sequence
601                (triggered by android.control.aePrecaptureTrigger) and
602                may be fired for captures for which the
603                android.control.captureIntent field is set to
604                STILL_CAPTURE
605              </notes>
606            </value>
607            <value>ON_ALWAYS_FLASH
608              <notes>
609                Like ON, except that the camera device also controls
610                the camera's flash unit, always firing it for still
611                captures.
612
613                The flash may be fired during a precapture sequence
614                (triggered by android.control.aePrecaptureTrigger) and
615                will always be fired for captures for which the
616                android.control.captureIntent field is set to
617                STILL_CAPTURE
618              </notes>
619            </value>
620            <value>ON_AUTO_FLASH_REDEYE
621              <notes>
622                Like ON_AUTO_FLASH, but with automatic red eye
623                reduction.
624
625                If deemed necessary by the camera device, a red eye
626                reduction flash will fire during the precapture
627                sequence.
628              </notes>
629            </value>
630          </enum>
631          <description>The desired mode for the camera device's
632          auto-exposure routine.</description>
633          <range>android.control.aeAvailableModes</range>
634          <details>
635            This control is only effective if android.control.mode is
636            AUTO.
637
638            When set to any of the ON modes, the camera device's
639            auto-exposure routine is enabled, overriding the
640            application's selected exposure time, sensor sensitivity,
641            and frame duration (android.sensor.exposureTime,
642            android.sensor.sensitivity, and
643            android.sensor.frameDuration). If one of the FLASH modes
644            is selected, the camera device's flash unit controls are
645            also overridden.
646
647            The FLASH modes are only available if the camera device
648            has a flash unit (android.flash.info.available is `true`).
649
650            If flash TORCH mode is desired, this field must be set to
651            ON or OFF, and android.flash.mode set to TORCH.
652
653            When set to any of the ON modes, the values chosen by the
654            camera device auto-exposure routine for the overridden
655            fields for a given capture will be available in its
656            CaptureResult.
657          </details>
658          <tag id="BC" />
659        </entry>
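        <!-- For illustration, a short Java sketch (assuming `characteristics` and `builder`)
             that enables automatic flash when a flash unit is present, plus the TORCH
             combination mentioned above:

               Boolean hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
               if (Boolean.TRUE.equals(hasFlash)) {
                   // Let the AE routine fire the flash automatically in low light.
                   builder.set(CaptureRequest.CONTROL_AE_MODE,
                           CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH);
               } else {
                   builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
               }
               // Torch instead: AE mode must be ON or OFF, with the flash mode set to TORCH.
               // builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
               // builder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH);
        -->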
660        <entry name="aeRegions" type="int32" visibility="public"
661            optional="true" container="array" typedef="meteringRectangle">
662          <array>
663            <size>5</size>
664            <size>area_count</size>
665          </array>
666          <description>List of metering areas to use for auto-exposure adjustment.</description>
667          <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
668          <range>Coordinates must be between `[(0,0), (width, height))` of
669          android.sensor.info.activeArraySize</range>
670          <details>
671              Not available if android.control.maxRegionsAe is 0.
672              Otherwise will always be present.
673
674              The maximum number of regions supported by the device is determined by the value
675              of android.control.maxRegionsAe.
676
677              The data representation is int[5 * area_count].
678              Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
679              The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
680              ymax.
681
682              The coordinate system is based on the active pixel array,
683              with (0,0) being the top-left pixel in the active pixel array, and
684              (android.sensor.info.activeArraySize.width - 1,
685              android.sensor.info.activeArraySize.height - 1) being the
686              bottom-right pixel in the active pixel array.
687
688              The weight must be within `[0, 1000]`, and represents a weight
689              for every pixel in the area. This means that a large metering area
690              with the same weight as a smaller area will have more effect in
691              the metering result. Metering areas can partially overlap and the
692              camera device will add the weights in the overlap region.
693
694              The weights are relative to weights of other exposure metering regions, so if only one
695              region is used, all non-zero weights will have the same effect. A region with 0
696              weight is ignored.
697
698              If all regions have 0 weight, then no specific metering area needs to be used by the
699              camera device.
700
701              If the metering region is outside the used android.scaler.cropRegion returned in
702              capture result metadata, the camera device will ignore the sections outside the crop
703              region and output only the intersection rectangle as the metering region in the result
704              metadata.  If the region is entirely outside the crop region, it will be ignored and
705              not reported in the result metadata.
706          </details>
707          <hal_details>
708              The HAL-level representation of MeteringRectangle[] is an
709              int[5 * area_count].
710              Every five elements represent a metering region of
711              (xmin, ymin, xmax, ymax, weight).
712              The rectangle is defined to be inclusive on xmin and ymin, but
713              exclusive on xmax and ymax.
714          </hal_details>
715          <tag id="BC" />
716        </entry>
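        <!-- A small Java sketch of the region format described above (assuming
             `characteristics` and `builder`): meter on the center quarter of the active
             array, guarded by android.control.maxRegionsAe:

               Integer maxAeRegions = characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
               if (maxAeRegions != null && maxAeRegions > 0) {
                   Rect active = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
                   // Center quarter of the active pixel array, with the maximum weight (1000).
                   MeteringRectangle region = new MeteringRectangle(
                           active.width() / 4, active.height() / 4,
                           active.width() / 2, active.height() / 2,
                           MeteringRectangle.METERING_WEIGHT_MAX);
                   builder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[] { region });
               }
        -->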
717        <entry name="aeTargetFpsRange" type="int32" visibility="public"
718               container="array" typedef="rangeInt" hwlevel="legacy">
719          <array>
720            <size>2</size>
721          </array>
722          <description>Range over which the auto-exposure routine can
723          adjust the capture frame rate to maintain good
724          exposure.</description>
725          <units>Frames per second (FPS)</units>
726          <range>Any of the entries in android.control.aeAvailableTargetFpsRanges</range>
727          <details>Only constrains auto-exposure (AE) algorithm, not
728          manual control of android.sensor.exposureTime and
729          android.sensor.frameDuration.</details>
730          <tag id="BC" />
731        </entry>
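        <!-- For example, a Java sketch (assuming `characteristics` and `builder`) that picks
             an advertised range topping out at 30 fps (an illustrative target), preferring
             the widest such range:

               Range<Integer>[] ranges = characteristics.get(
                       CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
               Range<Integer> best = ranges[0];
               for (Range<Integer> r : ranges) {
                   if (r.getUpper() == 30
                           && (best.getUpper() != 30 || r.getLower() < best.getLower())) {
                       best = r;
                   }
               }
               builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, best);
        -->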
732        <entry name="aePrecaptureTrigger" type="byte" visibility="public"
733               enum="true" hwlevel="limited">
734          <enum>
735            <value>IDLE
736              <notes>The trigger is idle.</notes>
737            </value>
738            <value>START
739              <notes>The precapture metering sequence will be started
740              by the camera device.
741
742              The exact effect of the precapture trigger depends on
743              the current AE mode and state.</notes>
744            </value>
745            <value>CANCEL
746              <notes>The camera device will cancel any currently active or completed
747              precapture metering sequence, and the auto-exposure routine will return to its
748              initial state.</notes>
749            </value>
750          </enum>
751          <description>Whether the camera device will trigger a precapture
752          metering sequence when it processes this request.</description>
753          <details>This entry is normally set to IDLE, or is not
754          included at all in the request settings. When included and
755          set to START, the camera device will trigger the auto-exposure (AE)
756          precapture metering sequence.
757
758          When set to CANCEL, the camera device will cancel any active
759          precapture metering trigger, and return to its initial AE state.
760          If a precapture metering sequence is already completed, and the camera
761          device has implicitly locked the AE for subsequent still capture, the
762          CANCEL trigger will unlock the AE and return to its initial AE state.
763
764          The precapture sequence should be triggered before starting a
765          high-quality still capture for final metering decisions to
766          be made, and for firing pre-capture flash pulses to estimate
767          scene brightness and required final capture flash power, when
768          the flash is enabled.
769
770          Normally, this entry should be set to START for only a
771          single request, and the application should wait until the
772          sequence completes before starting a new one.
773
774          When a precapture metering sequence is finished, the camera device
775          may lock the auto-exposure routine internally to be able to accurately expose the
776          subsequent still capture image (`android.control.captureIntent == STILL_CAPTURE`).
777          For this case, the AE may not resume normal scan if no subsequent still capture is
778          submitted. To ensure that the AE routine restarts normal scan, the application should
779          submit a request with `android.control.aeLock == true`, followed by a request
780          with `android.control.aeLock == false`, if the application decides not to submit a
781          still capture request after the precapture sequence completes. Alternatively, for
782          API level 23 or newer devices, the CANCEL trigger can be used to unlock the camera
783          device's internally locked AE if the application doesn't submit a still capture request
784          after the AE precapture trigger. Note that CANCEL was added in API level 23, and must not
785          be used on devices with earlier API levels.
786
787          The exact effect of auto-exposure (AE) precapture trigger
788          depends on the current AE mode and state; see
789          android.control.aeState for AE precapture state transition
790          details.
791
792          On LEGACY-level devices, the precapture trigger is not supported;
793          capturing a high-resolution JPEG image will automatically trigger a
794          precapture sequence before the high-resolution capture, including
795          potentially firing a pre-capture flash.
796
797          Using the precapture trigger and the auto-focus trigger android.control.afTrigger
798          simultaneously is allowed. However, since these triggers often require cooperation between
799          the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
800          focus sweep), the camera device may delay acting on a later trigger until the previous
801          trigger has been fully handled. This may lead to longer intervals between the trigger and
802          changes to android.control.aeState indicating the start of the precapture sequence, for
803          example.
804
805          If both the precapture and the auto-focus trigger are activated on the same request, then
806          the camera device will complete them in the optimal order for that device.
807          </details>
808          <hal_details>
809          The HAL must support triggering the AE precapture trigger while an AF trigger is active
810          (and vice versa), or at the same time as the AF trigger.  It is acceptable for the HAL to
811          treat these as two consecutive triggers, for example handling the AF trigger and then the
812          AE trigger.  Or the HAL may choose to optimize the case with both triggers fired at once,
813          to minimize the latency for converging both focus and exposure/flash usage.
814          </hal_details>
815          <tag id="BC" />
816        </entry>
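        <!-- To illustrate the single-request trigger usage described above, a hedged Java
             sketch (assuming `session`, `builder`, and `handler`, inside a method that
             declares throws CameraAccessException). Real applications usually watch the
             repeating request's results rather than this one-shot callback:

               // Issue the precapture trigger in exactly one capture request.
               builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                       CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
               session.capture(builder.build(), new CameraCaptureSession.CaptureCallback() {
                   @Override public void onCaptureCompleted(CameraCaptureSession s,
                           CaptureRequest req, TotalCaptureResult result) {
                       Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                       if (aeState != null
                               && (aeState == CameraMetadata.CONTROL_AE_STATE_CONVERGED
                                   || aeState == CameraMetadata.CONTROL_AE_STATE_FLASH_REQUIRED)) {
                           // Metering has finished; the still capture can be submitted now.
                       }
                   }
               }, handler);
               // Reset the trigger so later requests do not restart the sequence.
               builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                       CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
        -->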
817        <entry name="afMode" type="byte" visibility="public" enum="true"
818               hwlevel="legacy">
819          <enum>
820            <value>OFF
821            <notes>The auto-focus routine does not control the lens;
822            android.lens.focusDistance is controlled by the
823            application.</notes></value>
824            <value>AUTO
825            <notes>Basic automatic focus mode.
826
827            In this mode, the lens does not move unless
828            the autofocus trigger action is called. When that trigger
829            is activated, AF will transition to ACTIVE_SCAN, then to
830            the outcome of the scan (FOCUSED or NOT_FOCUSED).
831
832            Always supported if lens is not fixed focus.
833
834            Use android.lens.info.minimumFocusDistance to determine if lens
835            is fixed-focus.
836
837            Triggering AF_CANCEL resets the lens position to default,
838            and sets the AF state to INACTIVE.</notes></value>
839            <value>MACRO
840            <notes>Close-up focusing mode.
841
842            In this mode, the lens does not move unless the
843            autofocus trigger action is called. When that trigger is
844            activated, AF will transition to ACTIVE_SCAN, then to
845            the outcome of the scan (FOCUSED or NOT_FOCUSED). This
846            mode is optimized for focusing on objects very close to
847            the camera.
848
849            Triggering cancel AF resets the lens position to default, and
850            sets the AF state to INACTIVE.</notes></value>
854            <value>CONTINUOUS_VIDEO
855            <notes>In this mode, the AF algorithm modifies the lens
856            position continually to attempt to provide a
857            constantly-in-focus image stream.
858
859            The focusing behavior should be suitable for good quality
860            video recording; typically this means slower focus
861            movement and no overshoots. When the AF trigger is not
862            involved, the AF algorithm should start in INACTIVE state,
863            and then transition into PASSIVE_SCAN and PASSIVE_FOCUSED
864            states as appropriate. When the AF trigger is activated,
865            the algorithm should immediately transition into
866            AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the
867            lens position until a cancel AF trigger is received.
868
869            Once cancel is received, the algorithm should transition
870            back to INACTIVE and resume passive scan. Note that this
871            behavior is not identical to CONTINUOUS_PICTURE, since an
872            ongoing PASSIVE_SCAN must immediately be
873            canceled.</notes></value>
874            <value>CONTINUOUS_PICTURE
875            <notes>In this mode, the AF algorithm modifies the lens
876            position continually to attempt to provide a
877            constantly-in-focus image stream.
878
879            The focusing behavior should be suitable for still image
880            capture; typically this means focusing as fast as
881            possible. When the AF trigger is not involved, the AF
882            algorithm should start in INACTIVE state, and then
883            transition into PASSIVE_SCAN and PASSIVE_FOCUSED states as
884            appropriate as it attempts to maintain focus. When the AF
885            trigger is activated, the algorithm should finish its
886            PASSIVE_SCAN if active, and then transition into
887            AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the
888            lens position until a cancel AF trigger is received.
889
890            When the AF cancel trigger is activated, the algorithm
891            should transition back to INACTIVE and then act as if it
892            has just been started.</notes></value>
893            <value>EDOF
894            <notes>Extended depth of field (digital focus) mode.
895
896            The camera device will produce images with an extended
897            depth of field automatically; no special focusing
898            operations need to be done before taking a picture.
899
900            AF triggers are ignored, and the AF state will always be
901            INACTIVE.</notes></value>
902          </enum>
903          <description>Whether auto-focus (AF) is currently enabled, and what
904          mode it is set to.</description>
905          <range>android.control.afAvailableModes</range>
906          <details>Only effective if android.control.mode = AUTO and the lens is not fixed focus
907          (i.e. `android.lens.info.minimumFocusDistance &gt; 0`). Also note that
908          when android.control.aeMode is OFF, the behavior of AF is device
909          dependent. It is recommended to lock AF by using android.control.afTrigger before
910          setting android.control.aeMode to OFF, or set AF mode to OFF when AE is OFF.
911
912          If the lens is controlled by the camera device auto-focus algorithm,
913          the camera device will report the current AF status in android.control.afState
914          in result metadata.</details>
915          <hal_details>
916          When afMode is AUTO or MACRO, the lens must not move until an AF trigger is sent in a
917          request (android.control.afTrigger `==` START). After an AF trigger, the afState will end
918          up in either the FOCUSED_LOCKED or NOT_FOCUSED_LOCKED state (see
919          android.control.afState for detailed state transitions), which indicates that the lens is
920          locked and will not move. If camera movement (e.g. tilting camera) causes the lens to move
921          after the lens is locked, the HAL must compensate for this movement appropriately such that
922          the same focal plane remains in focus.
923
924          When afMode is one of the continuous auto-focus modes, the HAL is free to start an AF
925          scan whenever it's not locked. When the lens is locked after an AF trigger
926          (see android.control.afState for detailed state transitions), the HAL should maintain the
927          same lock behavior as above.
928
929          When afMode is OFF, the application controls focus manually. The accuracy of the
930          focus distance control depends on the android.lens.info.focusDistanceCalibration.
931          However, for any manual focus distance control, the lens must not move,
932          regardless of camera movement.
933
934          To put this in concrete terms, if the camera has lens elements which may move based on
935          camera orientation or motion (e.g. due to gravity), then the HAL must drive the lens to
936          remain in a fixed position invariant to the camera's orientation or motion, for example,
937          by using accelerometer measurements in the lens control logic. This is a typical issue
938          that will arise on camera modules with open-loop VCMs.
939          </hal_details>
940          <tag id="BC" />
941        </entry>
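        <!-- A brief Java sketch of the mode selection logic above (assuming `characteristics`
             and `builder`): use CONTINUOUS_PICTURE when available, AUTO otherwise, and leave
             AF off for fixed-focus lenses:

               Float minFocus = characteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
               int[] afModes = characteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
               int afMode = CameraMetadata.CONTROL_AF_MODE_OFF;   // fixed-focus lens: AF stays off
               if (minFocus != null && minFocus > 0f) {
                   afMode = CameraMetadata.CONTROL_AF_MODE_AUTO;  // always supported when not fixed focus
                   for (int m : afModes) {
                       if (m == CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE) {
                           afMode = m;                            // preferred default for still capture
                       }
                   }
               }
               builder.set(CaptureRequest.CONTROL_AF_MODE, afMode);
        -->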
942        <entry name="afRegions" type="int32" visibility="public"
943               optional="true" container="array" typedef="meteringRectangle">
944          <array>
945            <size>5</size>
946            <size>area_count</size>
947          </array>
948          <description>List of metering areas to use for auto-focus.</description>
949          <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
950          <range>Coordinates must be between `[(0,0), (width, height))` of
951          android.sensor.info.activeArraySize</range>
952          <details>
953              Not available if android.control.maxRegionsAf is 0.
954              Otherwise will always be present.
955
956              The maximum number of focus areas supported by the device is determined by the value
957              of android.control.maxRegionsAf.
958
959              The data representation is int[5 * area_count].
960              Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
961              The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
962              ymax.
963
964              The coordinate system is based on the active pixel array,
965              with (0,0) being the top-left pixel in the active pixel array, and
966              (android.sensor.info.activeArraySize.width - 1,
967              android.sensor.info.activeArraySize.height - 1) being the
968              bottom-right pixel in the active pixel array.
969
970              The weight must be within `[0, 1000]`, and represents a weight
971              for every pixel in the area. This means that a large metering area
972              with the same weight as a smaller area will have more effect in
973              the metering result. Metering areas can partially overlap and the
974              camera device will add the weights in the overlap region.
975
976              The weights are relative to weights of other metering regions, so if only one region
977              is used, all non-zero weights will have the same effect. A region with 0 weight is
978              ignored.
979
980              If all regions have 0 weight, then no specific metering area needs to be used by the
981              camera device.
982
983              If the metering region is outside the used android.scaler.cropRegion returned in
984              capture result metadata, the camera device will ignore the sections outside the crop
985              region and output only the intersection rectangle as the metering region in the result
986              metadata. If the region is entirely outside the crop region, it will be ignored and
987              not reported in the result metadata.
988          </details>
989          <hal_details>
990              The HAL-level representation of MeteringRectangle[] is an
991              int[5 * area_count].
992              Every five elements represent a metering region of
993              (xmin, ymin, xmax, ymax, weight).
994              The rectangle is defined to be inclusive on xmin and ymin, but
995              exclusive on xmax and ymax.
996          </hal_details>
997          <tag id="BC" />
998        </entry>
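        <!-- As with the AE regions above, a small Java sketch (assuming `characteristics`,
             `builder`, and a tap point `x`,`y` already mapped into active-array coordinates)
             that sets a single weighted focus region; the box size is illustrative:

               Integer maxAfRegions = characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
               if (maxAfRegions != null && maxAfRegions > 0) {
                   Rect active = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
                   int half = 200;  // half-size of the focus box, in active-array pixels
                   Rect box = new Rect(
                           Math.max(x - half, 0), Math.max(y - half, 0),
                           Math.min(x + half, active.width() - 1), Math.min(y + half, active.height() - 1));
                   builder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[] {
                           new MeteringRectangle(box, MeteringRectangle.METERING_WEIGHT_MAX) });
               }
        -->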
999        <entry name="afTrigger" type="byte" visibility="public" enum="true"
1000               hwlevel="legacy">
1001          <enum>
1002            <value>IDLE
1003              <notes>The trigger is idle.</notes>
1004            </value>
1005            <value>START
1006              <notes>Autofocus will trigger now.</notes>
1007            </value>
1008            <value>CANCEL
1009              <notes>Autofocus will return to its initial
1010              state, and cancel any currently active trigger.</notes>
1011            </value>
1012          </enum>
1013          <description>
1014          Whether the camera device will trigger autofocus for this request.
1015          </description>
1016          <details>This entry is normally set to IDLE, or is not
1017          included at all in the request settings.
1018
1019          When included and set to START, the camera device will trigger the
1020          autofocus algorithm. If autofocus is disabled, this trigger has no effect.
1021
1022          When set to CANCEL, the camera device will cancel any active trigger,
1023          and return to its initial AF state.
1024
1025          Generally, applications should set this entry to START or CANCEL for only a
1026          single capture, and then return it to IDLE (or not set at all). Specifying
1027          START for multiple captures in a row means restarting the AF operation over
1028          and over again.
1029
1030          See android.control.afState for what the trigger means for each AF mode.
1031
1032          Using the autofocus trigger and the precapture trigger android.control.aePrecaptureTrigger
1033          simultaneously is allowed. However, since these triggers often require cooperation between
1034          the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
1035          focus sweep), the camera device may delay acting on a later trigger until the previous
1036          trigger has been fully handled. This may lead to longer intervals between the trigger and
1037          changes to android.control.afState, for example.
1038          </details>
1039          <hal_details>
1040          The HAL must support triggering the AF trigger while an AE precapture trigger is active
1041          (and vice versa), or at the same time as the AE trigger.  It is acceptable for the HAL to
1042          treat these as two consecutive triggers, for example handling the AF trigger and then the
1043          AE trigger.  Or the HAL may choose to optimize the case with both triggers fired at once,
1044          to minimize the latency for converging both focus and exposure/flash usage.
1045          </hal_details>
1046          <tag id="BC" />
1047        </entry>
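        <!-- A minimal Java sketch of the one-shot trigger usage described above (assuming
             `session`, `builder`, and `handler`, inside a method that declares throws
             CameraAccessException): send START in a single request, then return the field
             to IDLE on the repeating request:

               builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
               session.capture(builder.build(), null, handler);          // trigger AF exactly once
               builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
               session.setRepeatingRequest(builder.build(), null, handler);
               // Watch android.control.afState in subsequent results for the scan outcome.
        -->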
1048        <entry name="awbLock" type="byte" visibility="public" enum="true"
1049               typedef="boolean" hwlevel="legacy">
1050          <enum>
1051            <value>OFF
1052            <notes>Auto-white balance lock is disabled; the AWB
1053            algorithm is free to update its parameters if in AUTO
1054            mode.</notes></value>
1055            <value>ON
1056            <notes>Auto-white balance lock is enabled; the AWB
1057            algorithm will not update its parameters while the lock
1058            is active.</notes></value>
1059          </enum>
1060          <description>Whether auto-white balance (AWB) is currently locked to its
1061          latest calculated values.</description>
1062          <details>
1063          When set to `true` (ON), the AWB algorithm is locked to its latest parameters,
1064          and will not change color balance settings until the lock is set to `false` (OFF).
1065
1066          Since the camera device has a pipeline of in-flight requests, the settings that
1067          get locked do not necessarily correspond to the settings that were present in the
1068          latest capture result received from the camera device, since additional captures
1069          and AWB updates may have occurred even before the result was sent out. If an
1070          application is switching between automatic and manual control and wishes to eliminate
1071          any flicker during the switch, the following procedure is recommended:
1072
1073            1. Starting in auto-AWB mode:
1074            2. Lock AWB
1075            3. Wait for the first result to be output that has the AWB locked
1076            4. Copy AWB settings from that result into a request, set the request to manual AWB
1077            5. Submit the capture request, proceed to run manual AWB as desired.
1078
1079          Note that AWB lock is only meaningful when
1080          android.control.awbMode is in the AUTO mode; in other modes,
1081          AWB is already fixed to a specific setting.
1082
1083          Some LEGACY devices may not support ON; the value is then overridden to OFF.
1084          </details>
1085          <tag id="BC" />
1086        </entry>
1087        <entry name="awbMode" type="byte" visibility="public" enum="true"
1088               hwlevel="legacy">
1089          <enum>
1090            <value>OFF
1091            <notes>
1092            The camera device's auto-white balance routine is disabled.
1093
1094            The application-selected color transform matrix
1095            (android.colorCorrection.transform) and gains
1096            (android.colorCorrection.gains) are used by the camera
1097            device for manual white balance control.
1098            </notes>
1099            </value>
1100            <value>AUTO
1101            <notes>
1102            The camera device's auto-white balance routine is active.
1103
1104            The application's values for android.colorCorrection.transform
1105            and android.colorCorrection.gains are ignored.
1106            For devices that support the MANUAL_POST_PROCESSING capability, the
1107            values used by the camera device for the transform and gains
1108            will be available in the capture result for this request.
1109            </notes>
1110            </value>
1111            <value>INCANDESCENT
1112            <notes>
1113            The camera device's auto-white balance routine is disabled;
1114            the camera device uses incandescent light as the assumed scene
1115            illumination for white balance.
1116
1117            While the exact white balance transforms are up to the
1118            camera device, they will approximately match the CIE
1119            standard illuminant A.
1120
1121            The application's values for android.colorCorrection.transform
1122            and android.colorCorrection.gains are ignored.
1123            For devices that support the MANUAL_POST_PROCESSING capability, the
1124            values used by the camera device for the transform and gains
1125            will be available in the capture result for this request.
1126            </notes>
1127            </value>
1128            <value>FLUORESCENT
1129            <notes>
1130            The camera device's auto-white balance routine is disabled;
1131            the camera device uses fluorescent light as the assumed scene
1132            illumination for white balance.
1133
1134            While the exact white balance transforms are up to the
1135            camera device, they will approximately match the CIE
1136            standard illuminant F2.
1137
1138            The application's values for android.colorCorrection.transform
1139            and android.colorCorrection.gains are ignored.
1140            For devices that support the MANUAL_POST_PROCESSING capability, the
1141            values used by the camera device for the transform and gains
1142            will be available in the capture result for this request.
1143            </notes>
1144            </value>
1145            <value>WARM_FLUORESCENT
1146            <notes>
1147            The camera device's auto-white balance routine is disabled;
1148            the camera device uses warm fluorescent light as the assumed scene
1149            illumination for white balance.
1150
1151            While the exact white balance transforms are up to the
1152            camera device, they will approximately match the CIE
1153            standard illuminant F4.
1154
1155            The application's values for android.colorCorrection.transform
1156            and android.colorCorrection.gains are ignored.
1157            For devices that support the MANUAL_POST_PROCESSING capability, the
1158            values used by the camera device for the transform and gains
1159            will be available in the capture result for this request.
1160            </notes>
1161            </value>
1162            <value>DAYLIGHT
1163            <notes>
1164            The camera device's auto-white balance routine is disabled;
1165            the camera device uses daylight light as the assumed scene
1166            illumination for white balance.
1167
1168            While the exact white balance transforms are up to the
1169            camera device, they will approximately match the CIE
1170            standard illuminant D65.
1171
1172            The application's values for android.colorCorrection.transform
1173            and android.colorCorrection.gains are ignored.
1174            For devices that support the MANUAL_POST_PROCESSING capability, the
1175            values used by the camera device for the transform and gains
1176            will be available in the capture result for this request.
1177            </notes>
1178            </value>
1179            <value>CLOUDY_DAYLIGHT
1180            <notes>
1181            The camera device's auto-white balance routine is disabled;
1182            the camera device uses cloudy daylight light as the assumed scene
1183            illumination for white balance.
1184
1185            The application's values for android.colorCorrection.transform
1186            and android.colorCorrection.gains are ignored.
1187            For devices that support the MANUAL_POST_PROCESSING capability, the
1188            values used by the camera device for the transform and gains
1189            will be available in the capture result for this request.
1190            </notes>
1191            </value>
1192            <value>TWILIGHT
1193            <notes>
1194            The camera device's auto-white balance routine is disabled;
1195            the camera device uses twilight light as the assumed scene
1196            illumination for white balance.
1197
1198            The application's values for android.colorCorrection.transform
1199            and android.colorCorrection.gains are ignored.
1200            For devices that support the MANUAL_POST_PROCESSING capability, the
1201            values used by the camera device for the transform and gains
1202            will be available in the capture result for this request.
1203            </notes>
1204            </value>
1205            <value>SHADE
1206            <notes>
1207            The camera device's auto-white balance routine is disabled;
1208            the camera device uses shade light as the assumed scene
1209            illumination for white balance.
1210
1211            The application's values for android.colorCorrection.transform
1212            and android.colorCorrection.gains are ignored.
1213            For devices that support the MANUAL_POST_PROCESSING capability, the
1214            values used by the camera device for the transform and gains
1215            will be available in the capture result for this request.
1216            </notes>
1217            </value>
1218          </enum>
1219          <description>Whether auto-white balance (AWB) is currently setting the color
1220          transform fields, and what its illumination target
1221          is.</description>
1222          <range>android.control.awbAvailableModes</range>
1223          <details>
1224          This control is only effective if android.control.mode is AUTO.
1225
          When set to the AUTO mode, the camera device's auto-white balance
          routine is enabled, overriding the application's selected
          android.colorCorrection.transform, android.colorCorrection.gains and
          android.colorCorrection.mode. Note that when android.control.aeMode
          is OFF, the behavior of AWB is device dependent. It is recommended to
          also set AWB mode to OFF or lock AWB by using android.control.awbLock before
          setting AE mode to OFF.
1233
1234          When set to the OFF mode, the camera device's auto-white balance
1235          routine is disabled. The application manually controls the white
1236          balance by android.colorCorrection.transform, android.colorCorrection.gains
1237          and android.colorCorrection.mode.
1238
1239          When set to any other modes, the camera device's auto-white
1240          balance routine is disabled. The camera device uses each
1241          particular illumination target for white balance
1242          adjustment. The application's values for
1243          android.colorCorrection.transform,
1244          android.colorCorrection.gains and
1245          android.colorCorrection.mode are ignored.
1246          </details>
1247          <tag id="BC" />
1248        </entry>
1249        <entry name="awbRegions" type="int32" visibility="public"
1250               optional="true" container="array" typedef="meteringRectangle">
1251          <array>
1252            <size>5</size>
1253            <size>area_count</size>
1254          </array>
1255          <description>List of metering areas to use for auto-white-balance illuminant
1256          estimation.</description>
1257          <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
1258          <range>Coordinates must be between `[(0,0), (width, height))` of
1259          android.sensor.info.activeArraySize</range>
1260          <details>
1261              Not available if android.control.maxRegionsAwb is 0.
1262              Otherwise will always be present.
1263
1264              The maximum number of regions supported by the device is determined by the value
1265              of android.control.maxRegionsAwb.
1266
1267              The data representation is int[5 * area_count].
1268              Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
1269              The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
1270              ymax.
1271
1272              The coordinate system is based on the active pixel array,
1273              with (0,0) being the top-left pixel in the active pixel array, and
1274              (android.sensor.info.activeArraySize.width - 1,
1275              android.sensor.info.activeArraySize.height - 1) being the
1276              bottom-right pixel in the active pixel array.
1277
1278              The weight must range from 0 to 1000, and represents a weight
1279              for every pixel in the area. This means that a large metering area
1280              with the same weight as a smaller area will have more effect in
1281              the metering result. Metering areas can partially overlap and the
1282              camera device will add the weights in the overlap region.
1283
1284              The weights are relative to weights of other white balance metering regions, so if
1285              only one region is used, all non-zero weights will have the same effect. A region with
1286              0 weight is ignored.
1287
1288              If all regions have 0 weight, then no specific metering area needs to be used by the
1289              camera device.
1290
1291              If the metering region is outside the used android.scaler.cropRegion returned in
1292              capture result metadata, the camera device will ignore the sections outside the crop
1293              region and output only the intersection rectangle as the metering region in the result
1294              metadata.  If the region is entirely outside the crop region, it will be ignored and
1295              not reported in the result metadata.
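
              As a purely illustrative Java sketch (the method name is an example), an application
              might set a single centered metering region after checking that
              android.control.maxRegionsAwb is nonzero:

              ```java
              // Meter AWB on the centered half of the active pixel array.
              static void setCenterAwbRegion(CameraCharacteristics characteristics,
                      CaptureRequest.Builder builder) {
                  Integer maxAwbRegions =
                          characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
                  if (maxAwbRegions == null || maxAwbRegions == 0) {
                      return; // awbRegions is not available on this device
                  }
                  Rect active = characteristics.get(
                          CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
                  MeteringRectangle center = new MeteringRectangle(
                          active.width() / 4, active.height() / 4,   // x, y
                          active.width() / 2, active.height() / 2,   // width, height
                          MeteringRectangle.METERING_WEIGHT_MAX);    // weight, 0 to 1000
                  builder.set(CaptureRequest.CONTROL_AWB_REGIONS,
                          new MeteringRectangle[] { center });
              }
              ```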
1296          </details>
1297          <hal_details>
1298              The HAL level representation of MeteringRectangle[] is a
1299              int[5 * area_count].
1300              Every five elements represent a metering region of
1301              (xmin, ymin, xmax, ymax, weight).
1302              The rectangle is defined to be inclusive on xmin and ymin, but
1303              exclusive on xmax and ymax.
1304          </hal_details>
1305          <tag id="BC" />
1306        </entry>
1307        <entry name="captureIntent" type="byte" visibility="public" enum="true"
1308               hwlevel="legacy">
1309          <enum>
1310            <value>CUSTOM
1311            <notes>The goal of this request doesn't fall into the other
1312            categories. The camera device will default to preview-like
1313            behavior.</notes></value>
1314            <value>PREVIEW
1315            <notes>This request is for a preview-like use case.
1316
1317            The precapture trigger may be used to start off a metering
1318            w/flash sequence.
1319            </notes></value>
1320            <value>STILL_CAPTURE
1321            <notes>This request is for a still capture-type
1322            use case.
1323
1324            If the flash unit is under automatic control, it may fire as needed.
1325            </notes></value>
1326            <value>VIDEO_RECORD
1327            <notes>This request is for a video recording
1328            use case.</notes></value>
1329            <value>VIDEO_SNAPSHOT
1330            <notes>This request is for a video snapshot (still
1331            image while recording video) use case.
1332
1333            The camera device should take the highest-quality image
1334            possible (given the other settings) without disrupting the
1335            frame rate of video recording.  </notes></value>
1336            <value>ZERO_SHUTTER_LAG
            <notes>This request is for a ZSL use case; the
1338            application will stream full-resolution images and
1339            reprocess one or several later for a final
1340            capture.
1341            </notes></value>
1342            <value>MANUAL
            <notes>This request is for a manual capture use case where
            the application wants to directly control the capture parameters.
1345
1346            For example, the application may wish to manually control
1347            android.sensor.exposureTime, android.sensor.sensitivity, etc.
1348            </notes></value>
1349          </enum>
1350          <description>Information to the camera device 3A (auto-exposure,
1351          auto-focus, auto-white balance) routines about the purpose
1352          of this capture, to help the camera device to decide optimal 3A
1353          strategy.</description>
1354          <details>This control (except for MANUAL) is only effective if
1355          `android.control.mode != OFF` and any 3A routine is active.
1356
1357          ZERO_SHUTTER_LAG will be supported if android.request.availableCapabilities
1358          contains PRIVATE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if
1359          android.request.availableCapabilities contains MANUAL_SENSOR. Other intent values are
1360          always supported.
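
          For illustration (assuming an open CameraDevice; the method name is an example), a
          request template presets this control to a matching intent, and the intent can also
          be set explicitly on any request builder:

          ```java
          // TEMPLATE_VIDEO_SNAPSHOT presets captureIntent to VIDEO_SNAPSHOT; the
          // intent can also be set or overridden directly on any request builder.
          static CaptureRequest.Builder makeVideoSnapshotBuilder(CameraDevice device)
                  throws CameraAccessException {
              CaptureRequest.Builder builder =
                      device.createCaptureRequest(CameraDevice.TEMPLATE_VIDEO_SNAPSHOT);
              builder.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
                      CameraMetadata.CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT);
              return builder;
          }
          ```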
1361          </details>
1362          <tag id="BC" />
1363        </entry>
1364        <entry name="effectMode" type="byte" visibility="public" enum="true"
1365               hwlevel="legacy">
1366          <enum>
1367            <value>OFF
1368              <notes>
1369              No color effect will be applied.
1370              </notes>
1371            </value>
1372            <value optional="true">MONO
1373              <notes>
1374              A "monocolor" effect where the image is mapped into
1375              a single color.
1376
1377              This will typically be grayscale.
1378              </notes>
1379            </value>
1380            <value optional="true">NEGATIVE
1381              <notes>
1382              A "photo-negative" effect where the image's colors
1383              are inverted.
1384              </notes>
1385            </value>
1386            <value optional="true">SOLARIZE
1387              <notes>
1388              A "solarisation" effect (Sabattier effect) where the
1389              image is wholly or partially reversed in
1390              tone.
1391              </notes>
1392            </value>
1393            <value optional="true">SEPIA
1394              <notes>
1395              A "sepia" effect where the image is mapped into warm
1396              gray, red, and brown tones.
1397              </notes>
1398            </value>
1399            <value optional="true">POSTERIZE
1400              <notes>
1401              A "posterization" effect where the image uses
1402              discrete regions of tone rather than a continuous
1403              gradient of tones.
1404              </notes>
1405            </value>
1406            <value optional="true">WHITEBOARD
1407              <notes>
1408              A "whiteboard" effect where the image is typically displayed
1409              as regions of white, with black or grey details.
1410              </notes>
1411            </value>
1412            <value optional="true">BLACKBOARD
1413              <notes>
1414              A "blackboard" effect where the image is typically displayed
1415              as regions of black, with white or grey details.
1416              </notes>
1417            </value>
1418            <value optional="true">AQUA
1419              <notes>
1420              An "aqua" effect where a blue hue is added to the image.
1421              </notes>
1422            </value>
1423          </enum>
1424          <description>A special color effect to apply.</description>
1425          <range>android.control.availableEffects</range>
1426          <details>
1427          When this mode is set, a color effect will be applied
1428          to images produced by the camera device. The interpretation
1429          and implementation of these color effects is left to the
1430          implementor of the camera device, and should not be
1431          depended on to be consistent (or present) across all
1432          devices.
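
          An illustrative Java sketch (the method name is an example) that enables SEPIA only
          when the device lists it in android.control.availableEffects:

          ```java
          // Apply SEPIA only when the device advertises it; otherwise leave OFF.
          static void maybeEnableSepia(CameraCharacteristics characteristics,
                  CaptureRequest.Builder builder) {
              int[] effects =
                      characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_EFFECTS);
              for (int effect : effects) {
                  if (effect == CameraMetadata.CONTROL_EFFECT_MODE_SEPIA) {
                      builder.set(CaptureRequest.CONTROL_EFFECT_MODE,
                              CameraMetadata.CONTROL_EFFECT_MODE_SEPIA);
                      return;
                  }
              }
          }
          ```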
1433          </details>
1434          <tag id="BC" />
1435        </entry>
1436        <entry name="mode" type="byte" visibility="public" enum="true"
1437               hwlevel="legacy">
1438          <enum>
1439            <value>OFF
1440            <notes>Full application control of pipeline.
1441
1442            All control by the device's metering and focusing (3A)
1443            routines is disabled, and no other settings in
1444            android.control.* have any effect, except that
1445            android.control.captureIntent may be used by the camera
1446            device to select post-processing values for processing
1447            blocks that do not allow for manual control, or are not
1448            exposed by the camera API.
1449
1450            However, the camera device's 3A routines may continue to
1451            collect statistics and update their internal state so that
1452            when control is switched to AUTO mode, good control values
1453            can be immediately applied.
1454            </notes></value>
1455            <value>AUTO
1456            <notes>Use settings for each individual 3A routine.
1457
1458            Manual control of capture parameters is disabled. All
1459            controls in android.control.* besides sceneMode take
1460            effect.</notes></value>
1461            <value optional="true">USE_SCENE_MODE
1462            <notes>Use a specific scene mode.
1463
1464            Enabling this disables control.aeMode, control.awbMode and
1465            control.afMode controls; the camera device will ignore
1466            those settings while USE_SCENE_MODE is active (except for
1467            FACE_PRIORITY scene mode). Other control entries are still active.
1468            This setting can only be used if scene mode is supported (i.e.
1469            android.control.availableSceneModes
            contains at least one mode other than DISABLED).</notes></value>
1471            <value optional="true">OFF_KEEP_STATE
1472            <notes>Same as OFF mode, except that this capture will not be
1473            used by camera device background auto-exposure, auto-white balance and
1474            auto-focus algorithms (3A) to update their statistics.
1475
1476            Specifically, the 3A routines are locked to the last
1477            values set from a request with AUTO, OFF, or
1478            USE_SCENE_MODE, and any statistics or state updates
1479            collected from manual captures with OFF_KEEP_STATE will be
1480            discarded by the camera device.
1481            </notes></value>
1482          </enum>
1483          <description>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control
1484          routines.</description>
1485          <range>android.control.availableModes</range>
1486          <details>
1487          This is a top-level 3A control switch. When set to OFF, all 3A control
1488          by the camera device is disabled. The application must set the fields for
1489          capture parameters itself.
1490
1491          When set to AUTO, the individual algorithm controls in
1492          android.control.* are in effect, such as android.control.afMode.
1493
1494          When set to USE_SCENE_MODE, the individual controls in
1495          android.control.* are mostly disabled, and the camera device implements
1496          one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
1497          as it wishes. The camera device scene mode 3A settings are provided by
1498          capture results {@link ACameraMetadata} from
1499          {@link ACameraCaptureSession_captureCallback_result}.
1500
          When set to OFF_KEEP_STATE, this mode is similar to OFF; the only difference
          is that this frame will not be used by the camera device's background 3A statistics
          update, as if the frame were never captured. This mode can be used when
          the application doesn't want a manually controlled capture to affect
          the subsequent auto 3A capture results.
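
          As an illustrative Java sketch of the OFF case (assuming the device lists the
          MANUAL_SENSOR capability; the method name and exposure values are examples only):

          ```java
          // Disable 3A and supply manual exposure settings in their place.
          static void useFullManualControl(CaptureRequest.Builder builder) {
              builder.set(CaptureRequest.CONTROL_MODE,
                      CameraMetadata.CONTROL_MODE_OFF);
              builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10_000_000L);   // 10 ms
              builder.set(CaptureRequest.SENSOR_SENSITIVITY, 200);             // ISO 200
              builder.set(CaptureRequest.SENSOR_FRAME_DURATION, 33_333_333L);  // ~30 fps
          }
          ```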
1506          </details>
1507          <tag id="BC" />
1508        </entry>
1509        <entry name="sceneMode" type="byte" visibility="public" enum="true"
1510               hwlevel="legacy">
1511          <enum>
1512            <value id="0">DISABLED
1513              <notes>
1514              Indicates that no scene modes are set for a given capture request.
1515              </notes>
1516            </value>
1517            <value>FACE_PRIORITY
1518              <notes>If face detection support exists, use face
1519              detection data for auto-focus, auto-white balance, and
1520              auto-exposure routines.
1521
1522              If face detection statistics are disabled
1523              (i.e. android.statistics.faceDetectMode is set to OFF),
1524              this should still operate correctly (but will not return
1525              face detection statistics to the framework).
1526
1527              Unlike the other scene modes, android.control.aeMode,
1528              android.control.awbMode, and android.control.afMode
1529              remain active when FACE_PRIORITY is set.
1530              </notes>
1531            </value>
1532            <value optional="true">ACTION
1533              <notes>
1534              Optimized for photos of quickly moving objects.
1535
1536              Similar to SPORTS.
1537              </notes>
1538            </value>
1539            <value optional="true">PORTRAIT
1540              <notes>
1541              Optimized for still photos of people.
1542              </notes>
1543            </value>
1544            <value optional="true">LANDSCAPE
1545              <notes>
1546              Optimized for photos of distant macroscopic objects.
1547              </notes>
1548            </value>
1549            <value optional="true">NIGHT
1550              <notes>
1551              Optimized for low-light settings.
1552              </notes>
1553            </value>
1554            <value optional="true">NIGHT_PORTRAIT
1555              <notes>
1556              Optimized for still photos of people in low-light
1557              settings.
1558              </notes>
1559            </value>
1560            <value optional="true">THEATRE
1561              <notes>
1562              Optimized for dim, indoor settings where flash must
1563              remain off.
1564              </notes>
1565            </value>
1566            <value optional="true">BEACH
1567              <notes>
1568              Optimized for bright, outdoor beach settings.
1569              </notes>
1570            </value>
1571            <value optional="true">SNOW
1572              <notes>
1573              Optimized for bright, outdoor settings containing snow.
1574              </notes>
1575            </value>
1576            <value optional="true">SUNSET
1577              <notes>
1578              Optimized for scenes of the setting sun.
1579              </notes>
1580            </value>
1581            <value optional="true">STEADYPHOTO
1582              <notes>
1583              Optimized to avoid blurry photos due to small amounts of
1584              device motion (for example: due to hand shake).
1585              </notes>
1586            </value>
1587            <value optional="true">FIREWORKS
1588              <notes>
1589              Optimized for nighttime photos of fireworks.
1590              </notes>
1591            </value>
1592            <value optional="true">SPORTS
1593              <notes>
1594              Optimized for photos of quickly moving people.
1595
1596              Similar to ACTION.
1597              </notes>
1598            </value>
1599            <value optional="true">PARTY
1600              <notes>
1601              Optimized for dim, indoor settings with multiple moving
1602              people.
1603              </notes>
1604            </value>
1605            <value optional="true">CANDLELIGHT
1606              <notes>
1607              Optimized for dim settings where the main light source
1608              is a flame.
1609              </notes>
1610            </value>
1611            <value optional="true">BARCODE
1612              <notes>
              Optimized for accurately capturing a photo of a barcode,
              for use by camera applications that wish to read the
              barcode value.
1616              </notes>
1617            </value>
1618            <value deprecated="true" optional="true" ndk_hidden="true">HIGH_SPEED_VIDEO
1619              <notes>
1620              This is deprecated, please use {@link
1621              android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
1622              and {@link
1623              android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
1624              for high speed video recording.
1625
1626              Optimized for high speed video recording (frame rate >=60fps) use case.
1627
1628              The supported high speed video sizes and fps ranges are specified in
1629              android.control.availableHighSpeedVideoConfigurations. To get desired
1630              output frame rates, the application is only allowed to select video size
              and fps range combinations listed in this static metadata. The fps range
              can be controlled via android.control.aeTargetFpsRange.
1633
1634              In this mode, the camera device will override aeMode, awbMode, and afMode to
1635              ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
1636              controls will be overridden to be FAST. Therefore, no manual control of capture
1637              and post-processing parameters is possible. All other controls operate the
1638              same as when android.control.mode == AUTO. This means that all other
1639              android.control.* fields continue to work, such as
1640
1641              * android.control.aeTargetFpsRange
1642              * android.control.aeExposureCompensation
1643              * android.control.aeLock
1644              * android.control.awbLock
1645              * android.control.effectMode
1646              * android.control.aeRegions
1647              * android.control.afRegions
1648              * android.control.awbRegions
1649              * android.control.afTrigger
1650              * android.control.aePrecaptureTrigger
1651
1652              Outside of android.control.*, the following controls will work:
1653
1654              * android.flash.mode (automatic flash for still capture will not work since aeMode is ON)
1655              * android.lens.opticalStabilizationMode (if it is supported)
1656              * android.scaler.cropRegion
1657              * android.statistics.faceDetectMode
1658
              For the high speed recording use case, the actual maximum supported frame rate may
              be lower than what the camera can output, depending on the destination Surfaces for
              the image data. For example, if the destination surface is from a video encoder,
              the application needs to check whether the video encoder is capable of supporting the
              high frame rate for a given video size, or it will end up with a lower recording
              frame rate. If the destination surface is from a preview window, the preview frame
              rate will be bounded by the screen refresh rate.
1666
1667              The camera device will only support up to 2 output high speed streams
1668              (processed non-stalling format defined in android.request.maxNumOutputStreams)
1669              in this mode. This control will be effective only if all of below conditions are true:
1670
1671              * The application created no more than maxNumHighSpeedStreams processed non-stalling
1672              format output streams, where maxNumHighSpeedStreams is calculated as
1673              min(2, android.request.maxNumOutputStreams[Processed (but not-stalling)]).
1674              * The stream sizes are selected from the sizes reported by
1675              android.control.availableHighSpeedVideoConfigurations.
1676              * No processed non-stalling or raw streams are configured.
1677
              When the above conditions are NOT satisfied, the controls of this mode and
              android.control.aeTargetFpsRange will be ignored by the camera device,
              the camera device will fall back to android.control.mode `==` AUTO,
              and the returned capture result metadata will give the fps range chosen
              by the camera device.
1683
1684              Switching into or out of this mode may trigger some camera ISP/sensor
              reconfigurations, which may introduce extra latency. It is recommended that
              the application avoid unnecessary scene mode switches as much as possible.
1687              </notes>
1688            </value>
1689            <value optional="true">HDR
1690              <notes>
1691              Turn on a device-specific high dynamic range (HDR) mode.
1692
1693              In this scene mode, the camera device captures images
1694              that keep a larger range of scene illumination levels
1695              visible in the final image. For example, when taking a
              picture of an object in front of a bright window, both
1697              the object and the scene through the window may be
1698              visible when using HDR mode, while in normal AUTO mode,
1699              one or the other may be poorly exposed. As a tradeoff,
1700              HDR mode generally takes much longer to capture a single
1701              image, has no user control, and may have other artifacts
1702              depending on the HDR method used.
1703
1704              Therefore, HDR captures operate at a much slower rate
1705              than regular captures.
1706
1707              In this mode, on LIMITED or FULL devices, when a request
1708              is made with a android.control.captureIntent of
1709              STILL_CAPTURE, the camera device will capture an image
1710              using a high dynamic range capture technique.  On LEGACY
1711              devices, captures that target a JPEG-format output will
1712              be captured with HDR, and the capture intent is not
1713              relevant.
1714
1715              The HDR capture may involve the device capturing a burst
1716              of images internally and combining them into one, or it
1717              may involve the device using specialized high dynamic
1718              range capture hardware. In all cases, a single image is
1719              produced in response to a capture request submitted
1720              while in HDR mode.
1721
1722              Since substantial post-processing is generally needed to
1723              produce an HDR image, only YUV, PRIVATE, and JPEG
1724              outputs are supported for LIMITED/FULL device HDR
1725              captures, and only JPEG outputs are supported for LEGACY
1726              HDR captures. Using a RAW output for HDR capture is not
1727              supported.
1728
1729              Some devices may also support always-on HDR, which
1730              applies HDR processing at full frame rate.  For these
1731              devices, intents other than STILL_CAPTURE will also
1732              produce an HDR output with no frame rate impact compared
1733              to normal operation, though the quality may be lower
1734              than for STILL_CAPTURE intents.
1735
1736              If SCENE_MODE_HDR is used with unsupported output types
1737              or capture intents, the images captured will be as if
1738              the SCENE_MODE was not enabled at all.
1739              </notes>
1740            </value>
1741            <value optional="true" hidden="true">FACE_PRIORITY_LOW_LIGHT
1742              <notes>Same as FACE_PRIORITY scene mode, except that the camera
1743              device will choose higher sensitivity values (android.sensor.sensitivity)
1744              under low light conditions.
1745
1746              The camera device may be tuned to expose the images in a reduced
1747              sensitivity range to produce the best quality images. For example,
1748              if the android.sensor.info.sensitivityRange gives range of [100, 1600],
1749              the camera device auto-exposure routine tuning process may limit the actual
1750              exposure sensitivity range to [100, 1200] to ensure that the noise level isn't
              excessive in order to preserve the image quality. Under this situation, the image under
1752              low light may be under-exposed when the sensor max exposure time (bounded by the
1753              android.control.aeTargetFpsRange when android.control.aeMode is one of the
1754              ON_* modes) and effective max sensitivity are reached. This scene mode allows the
1755              camera device auto-exposure routine to increase the sensitivity up to the max
1756              sensitivity specified by android.sensor.info.sensitivityRange when the scene is too
1757              dark and the max exposure time is reached. The captured images may be noisier
1758              compared with the images captured in normal FACE_PRIORITY mode; therefore, it is
1759              recommended that the application only use this scene mode when it is capable of
1760              reducing the noise level of the captured images.
1761
1762              Unlike the other scene modes, android.control.aeMode,
1763              android.control.awbMode, and android.control.afMode
1764              remain active when FACE_PRIORITY_LOW_LIGHT is set.
1765              </notes>
1766            </value>
1767            <value optional="true" hidden="true" id="100">DEVICE_CUSTOM_START
1768              <notes>
1769                Scene mode values within the range of
1770                `[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END]` are reserved for device specific
1771                customized scene modes.
1772              </notes>
1773            </value>
1774            <value optional="true" hidden="true" id="127">DEVICE_CUSTOM_END
1775              <notes>
1776                Scene mode values within the range of
1777                `[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END]` are reserved for device specific
1778                customized scene modes.
1779              </notes>
1780            </value>
1781          </enum>
1782          <description>
1783          Control for which scene mode is currently active.
1784          </description>
1785          <range>android.control.availableSceneModes</range>
1786          <details>
1787          Scene modes are custom camera modes optimized for a certain set of conditions and
1788          capture settings.
1789
          This is the mode that is active when
1791          `android.control.mode == USE_SCENE_MODE`. Aside from FACE_PRIORITY, these modes will
1792          disable android.control.aeMode, android.control.awbMode, and android.control.afMode
1793          while in use.
1794
1795          The interpretation and implementation of these scene modes is left
1796          to the implementor of the camera device. Their behavior will not be
1797          consistent across all devices, and any given device may only implement
1798          a subset of these modes.
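
          An illustrative Java sketch (the method name is an example) that switches to the
          NIGHT scene mode only when the device lists it in android.control.availableSceneModes:

          ```java
          // Enable NIGHT scene mode if advertised; USE_SCENE_MODE routes control
          // to the device's own per-scene 3A settings.
          static void maybeUseNightScene(CameraCharacteristics characteristics,
                  CaptureRequest.Builder builder) {
              int[] sceneModes = characteristics.get(
                      CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES);
              for (int mode : sceneModes) {
                  if (mode == CameraMetadata.CONTROL_SCENE_MODE_NIGHT) {
                      builder.set(CaptureRequest.CONTROL_MODE,
                              CameraMetadata.CONTROL_MODE_USE_SCENE_MODE);
                      builder.set(CaptureRequest.CONTROL_SCENE_MODE,
                              CameraMetadata.CONTROL_SCENE_MODE_NIGHT);
                      return;
                  }
              }
          }
          ```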
1799          </details>
1800          <hal_details>
1801          HAL implementations that include scene modes are expected to provide
1802          the per-scene settings to use for android.control.aeMode,
1803          android.control.awbMode, and android.control.afMode in
1804          android.control.sceneModeOverrides.
1805
1806          For HIGH_SPEED_VIDEO mode, if it is included in android.control.availableSceneModes,
1807          the HAL must list supported video size and fps range in
1808          android.control.availableHighSpeedVideoConfigurations. For a given size, e.g.
1809          1280x720, if the HAL has two different sensor configurations for normal streaming
1810          mode and high speed streaming, when this scene mode is set/reset in a sequence of capture
1811          requests, the HAL may have to switch between different sensor modes.
          This mode is deprecated in HAL3.3; to support high speed video recording, implement
          android.control.availableHighSpeedVideoConfigurations and the CONSTRAINED_HIGH_SPEED_VIDEO
          capability defined in android.request.availableCapabilities.
1815          </hal_details>
1816          <tag id="BC" />
1817        </entry>
1818        <entry name="videoStabilizationMode" type="byte" visibility="public"
1819               enum="true" hwlevel="legacy">
1820          <enum>
1821            <value>OFF
1822            <notes>
1823              Video stabilization is disabled.
1824            </notes></value>
1825            <value>ON
1826            <notes>
1827              Video stabilization is enabled.
1828            </notes></value>
1829          </enum>
1830          <description>Whether video stabilization is
1831          active.</description>
1832          <details>
1833          Video stabilization automatically warps images from
1834          the camera in order to stabilize motion between consecutive frames.
1835
1836          If enabled, video stabilization can modify the
1837          android.scaler.cropRegion to keep the video stream stabilized.
1838
          Switching between different video stabilization modes may take several
          frames to initialize; the camera device will report the current mode
          in capture result metadata. For example, when "ON" mode is requested,
          the video stabilization mode in the first several capture results may
          still be "OFF", and it will become "ON" when the initialization is
          done.
1845
1846          In addition, not all recording sizes or frame rates may be supported for
1847          stabilization by a device that reports stabilization support. It is guaranteed
1848          that an output targeting a MediaRecorder or MediaCodec will be stabilized if
1849          the recording resolution is less than or equal to 1920 x 1080 (width less than
1850          or equal to 1920, height less than or equal to 1080), and the recording
1851          frame rate is less than or equal to 30fps.  At other sizes, the CaptureResult
1852          android.control.videoStabilizationMode field will return
1853          OFF if the recording output is not stabilized, or if there are no output
1854          Surface types that can be stabilized.
1855
1856          If a camera device supports both this mode and OIS
1857          (android.lens.opticalStabilizationMode), turning both modes on may
1858          produce undesirable interaction, so it is recommended not to enable
1859          both at the same time.
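
          An illustrative Java sketch (the method name is an example) that enables video
          stabilization only when the device lists ON in
          android.control.availableVideoStabilizationModes:

          ```java
          // Request video stabilization when the device lists ON among its modes.
          static void maybeEnableVideoStabilization(
                  CameraCharacteristics characteristics,
                  CaptureRequest.Builder builder) {
              int[] modes = characteristics.get(
                      CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
              for (int mode : modes) {
                  if (mode == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON) {
                      builder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                              CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON);
                      return;
                  }
              }
          }
          ```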
1860          </details>
1861          <tag id="BC" />
1862        </entry>
1863      </controls>
1864      <static>
1865        <entry name="aeAvailableAntibandingModes" type="byte" visibility="public"
1866               type_notes="list of enums" container="array" typedef="enumList"
1867               hwlevel="legacy">
1868          <array>
1869            <size>n</size>
1870          </array>
1871          <description>
1872            List of auto-exposure antibanding modes for android.control.aeAntibandingMode that are
1873            supported by this camera device.
1874          </description>
1875          <range>Any value listed in android.control.aeAntibandingMode</range>
1876          <details>
1877            Not all of the auto-exposure anti-banding modes may be
1878            supported by a given camera device. This field lists the
1879            valid anti-banding modes that the application may request
1880            for this camera device with the
1881            android.control.aeAntibandingMode control.
1882          </details>
1883          <tag id="BC" />
1884        </entry>
1885        <entry name="aeAvailableModes" type="byte" visibility="public"
1886               type_notes="list of enums" container="array" typedef="enumList"
1887               hwlevel="legacy">
1888          <array>
1889            <size>n</size>
1890          </array>
1891          <description>
1892            List of auto-exposure modes for android.control.aeMode that are supported by this camera
1893            device.
1894          </description>
1895          <range>Any value listed in android.control.aeMode</range>
1896          <details>
1897            Not all the auto-exposure modes may be supported by a
1898            given camera device, especially if no flash unit is
1899            available. This entry lists the valid modes for
1900            android.control.aeMode for this camera device.
1901
1902            All camera devices support ON, and all camera devices with flash
1903            units support ON_AUTO_FLASH and ON_ALWAYS_FLASH.
1904
1905            FULL mode camera devices always support OFF mode,
1906            which enables application control of camera exposure time,
1907            sensitivity, and frame duration.
1908
1909            LEGACY mode camera devices never support OFF mode.
1910            LIMITED mode devices support OFF if they support the MANUAL_SENSOR
1911            capability.
1912          </details>
1913          <tag id="BC" />
1914        </entry>
1915        <entry name="aeAvailableTargetFpsRanges" type="int32" visibility="public"
1916               type_notes="list of pairs of frame rates"
1917               container="array" typedef="rangeInt"
1918               hwlevel="legacy">
1919          <array>
1920            <size>2</size>
1921            <size>n</size>
1922          </array>
1923          <description>List of frame rate ranges for android.control.aeTargetFpsRange supported by
1924          this camera device.</description>
1925          <units>Frames per second (FPS)</units>
1926          <details>
1927          For devices at the LEGACY level or above:
1928
1929          * For constant-framerate recording, for each normal
1930          [CamcorderProfile](https://developer.android.com/reference/android/media/CamcorderProfile.html), that is, a
1931          [CamcorderProfile](https://developer.android.com/reference/android/media/CamcorderProfile.html) that has
1932          [quality](https://developer.android.com/reference/android/media/CamcorderProfile.html#quality)
1933          in the range [
1934          [QUALITY_LOW](https://developer.android.com/reference/android/media/CamcorderProfile.html#QUALITY_LOW),
1935          [QUALITY_2160P](https://developer.android.com/reference/android/media/CamcorderProfile.html#QUALITY_2160P)],
1936          if the profile is supported by the device and has
1937          [videoFrameRate](https://developer.android.com/reference/android/media/CamcorderProfile.html#videoFrameRate)
1938          `x`, this list will always include (`x`,`x`).
1939
1940          * Also, a camera device must either not support any
1941          [CamcorderProfile](https://developer.android.com/reference/android/media/CamcorderProfile.html),
1942          or support at least one
1943          normal [CamcorderProfile](https://developer.android.com/reference/android/media/CamcorderProfile.html)
1944          that has
1945          [videoFrameRate](https://developer.android.com/reference/android/media/CamcorderProfile.html#videoFrameRate) `x` &gt;= 24.
1946
1947          For devices at the LIMITED level or above:
1948
1949          * For YUV_420_888 burst capture use case, this list will always include (`min`, `max`)
1950          and (`max`, `max`) where `min` &lt;= 15 and `max` = the maximum output frame rate of the
1951          maximum YUV_420_888 output size.
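
          As an illustrative Java sketch (the method name is an example), an application could
          pick a fixed 30fps range for constant-framerate recording when one is listed:

          ```java
          // Pick a fixed 30 fps range for constant-framerate recording, if listed.
          static void selectFixed30Fps(CameraCharacteristics characteristics,
                  CaptureRequest.Builder builder) {
              Range&lt;Integer&gt;[] ranges = characteristics.get(
                      CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
              for (Range&lt;Integer&gt; range : ranges) {
                  if (range.getLower() == 30 &amp;&amp; range.getUpper() == 30) {
                      builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, range);
                      return;
                  }
              }
          }
          ```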
1952          </details>
1953          <tag id="BC" />
1954        </entry>
1955        <entry name="aeCompensationRange" type="int32" visibility="public"
1956               container="array" typedef="rangeInt"
1957               hwlevel="legacy">
1958          <array>
1959            <size>2</size>
1960          </array>
1961          <description>Maximum and minimum exposure compensation values for
1962          android.control.aeExposureCompensation, in counts of android.control.aeCompensationStep,
1963          that are supported by this camera device.</description>
1964          <range>
1965            Range [0,0] indicates that exposure compensation is not supported.
1966
1967            For LIMITED and FULL devices, range must follow below requirements if exposure
1968            compensation is supported (`range != [0, 0]`):
1969
1970            `Min.exposure compensation * android.control.aeCompensationStep &lt;= -2 EV`
1971
1972            `Max.exposure compensation * android.control.aeCompensationStep &gt;= 2 EV`
1973
1974            LEGACY devices may support a smaller range than this.
1975          </range>
1976          <tag id="BC" />
1977        </entry>
1978        <entry name="aeCompensationStep" type="rational" visibility="public"
1979               hwlevel="legacy">
1980          <description>Smallest step by which the exposure compensation
1981          can be changed.</description>
1982          <units>Exposure Value (EV)</units>
1983          <details>
1984          This is the unit for android.control.aeExposureCompensation. For example, if this key has
1985          a value of `1/2`, then a setting of `-2` for android.control.aeExposureCompensation means
1986          that the target EV offset for the auto-exposure routine is -1 EV.
1987
1988          One unit of EV compensation changes the brightness of the captured image by a factor
1989          of two. +1 EV doubles the image brightness, while -1 EV halves the image brightness.
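
          For illustration (the method name is an example), converting a desired EV offset into
          a compensation value and clamping it to android.control.aeCompensationRange might
          look like:

          ```java
          // index = desiredEv / step, clamped to android.control.aeCompensationRange.
          static void setExposureCompensationEv(CameraCharacteristics characteristics,
                  CaptureRequest.Builder builder, double desiredEv) {
              Rational step = characteristics.get(
                      CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
              Range&lt;Integer&gt; range = characteristics.get(
                      CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
              int index = (int) Math.round(
                      desiredEv * step.getDenominator() / step.getNumerator());
              index = Math.max(range.getLower(), Math.min(range.getUpper(), index));
              builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, index);
          }
          ```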
1990          </details>
1991          <hal_details>
1992            This must be less than or equal to 1/2.
1993          </hal_details>
1994          <tag id="BC" />
1995        </entry>
1996        <entry name="afAvailableModes" type="byte" visibility="public"
1997               type_notes="List of enums" container="array" typedef="enumList"
1998               hwlevel="legacy">
1999          <array>
2000            <size>n</size>
2001          </array>
2002          <description>
2003          List of auto-focus (AF) modes for android.control.afMode that are
2004          supported by this camera device.
2005          </description>
2006          <range>Any value listed in android.control.afMode</range>
2007          <details>
2008          Not all the auto-focus modes may be supported by a
2009          given camera device. This entry lists the valid modes for
2010          android.control.afMode for this camera device.
2011
2012          All LIMITED and FULL mode camera devices will support OFF mode, and all
2013          camera devices with adjustable focuser units
2014          (`android.lens.info.minimumFocusDistance &gt; 0`) will support AUTO mode.
2015
2016          LEGACY devices will support OFF mode only if they support
2017          focusing to infinity (by also setting android.lens.focusDistance to
2018          `0.0f`).
2019          </details>
2020          <tag id="BC" />
2021        </entry>
2022        <entry name="availableEffects" type="byte" visibility="public"
2023               type_notes="List of enums (android.control.effectMode)." container="array"
2024               typedef="enumList" hwlevel="legacy">
2025          <array>
2026            <size>n</size>
2027          </array>
2028          <description>
2029          List of color effects for android.control.effectMode that are supported by this camera
2030          device.
2031          </description>
2032          <range>Any value listed in android.control.effectMode</range>
2033          <details>
2034          This list contains the color effect modes that can be applied to
2035          images produced by the camera device.
2036          Implementations are not expected to be consistent across all devices.
2037          If no color effect modes are available for a device, this will only list
2038          OFF.
2039
2040          A color effect will only be applied if
2041          android.control.mode != OFF.  OFF is always included in this list.
2042
2043          This control has no effect on the operation of other control routines such
2044          as auto-exposure, white balance, or focus.
2045          </details>
2046          <tag id="BC" />
2047        </entry>
2048        <entry name="availableSceneModes" type="byte" visibility="public"
2049               type_notes="List of enums (android.control.sceneMode)."
2050               container="array" typedef="enumList" hwlevel="legacy">
2051          <array>
2052            <size>n</size>
2053          </array>
2054          <description>
2055          List of scene modes for android.control.sceneMode that are supported by this camera
2056          device.
2057          </description>
2058          <range>Any value listed in android.control.sceneMode</range>
2059          <details>
2060          This list contains scene modes that can be set for the camera device.
2061          Only scene modes that have been fully implemented for the
2062          camera device may be included here. Implementations are not expected
2063          to be consistent across all devices.
2064
2065          If no scene modes are supported by the camera device, this
2066          will be set to DISABLED. Otherwise DISABLED will not be listed.
2067
2068          FACE_PRIORITY is always listed if face detection is
          supported (i.e. `android.statistics.info.maxFaceCount &gt;
2070          0`).
2071          </details>
2072          <tag id="BC" />
2073        </entry>
2074        <entry name="availableVideoStabilizationModes" type="byte"
2075               visibility="public" type_notes="List of enums." container="array"
2076               typedef="enumList" hwlevel="legacy">
2077          <array>
2078            <size>n</size>
2079          </array>
2080          <description>
2081          List of video stabilization modes for android.control.videoStabilizationMode
2082          that are supported by this camera device.
2083          </description>
2084          <range>Any value listed in android.control.videoStabilizationMode</range>
2085          <details>
2086          OFF will always be listed.
2087          </details>
2088          <tag id="BC" />
2089        </entry>
2090        <entry name="awbAvailableModes" type="byte" visibility="public"
2091               type_notes="List of enums"
2092               container="array" typedef="enumList" hwlevel="legacy">
2093          <array>
2094            <size>n</size>
2095          </array>
2096          <description>
2097          List of auto-white-balance modes for android.control.awbMode that are supported by this
2098          camera device.
2099          </description>
2100          <range>Any value listed in android.control.awbMode</range>
2101          <details>
2102          Not all the auto-white-balance modes may be supported by a
2103          given camera device. This entry lists the valid modes for
2104          android.control.awbMode for this camera device.
2105
2106          All camera devices will support ON mode.
2107
2108          Camera devices that support the MANUAL_POST_PROCESSING capability will always support OFF
2109          mode, which enables application control of white balance, by using
2110          android.colorCorrection.transform and android.colorCorrection.gains
2111          (android.colorCorrection.mode must be set to TRANSFORM_MATRIX). This includes all FULL
2112          mode camera devices.
2113          </details>
2114          <tag id="BC" />
2115        </entry>
2116        <entry name="maxRegions" type="int32" visibility="ndk_public"
2117               container="array" hwlevel="legacy">
2118          <array>
2119            <size>3</size>
2120          </array>
2121          <description>
2122          List of the maximum number of regions that can be used for metering in
2123          auto-exposure (AE), auto-white balance (AWB), and auto-focus (AF);
          this corresponds to the maximum number of elements in
2125          android.control.aeRegions, android.control.awbRegions,
2126          and android.control.afRegions.
2127          </description>
2128          <range>
          Value must be &gt;= 0 for each element. For full-capability devices
          this value must be &gt;= 1 for AE and AF. The order of the elements is:
2131          `(AE, AWB, AF)`.</range>
2132          <tag id="BC" />
2133        </entry>
2134        <entry name="maxRegionsAe" type="int32" visibility="java_public"
2135               synthetic="true" hwlevel="legacy">
2136          <description>
2137          The maximum number of metering regions that can be used by the auto-exposure (AE)
2138          routine.
2139          </description>
          <range>Value will be &gt;= 0. For FULL-capability devices, this
          value will be &gt;= 1.
2142          </range>
2143          <details>
          This corresponds to the maximum allowed number of elements in
2145          android.control.aeRegions.
2146          </details>
2147          <hal_details>This entry is private to the framework. Fill in
2148          maxRegions to have this entry be automatically populated.
2149          </hal_details>
2150        </entry>
2151        <entry name="maxRegionsAwb" type="int32" visibility="java_public"
2152               synthetic="true" hwlevel="legacy">
2153          <description>
2154          The maximum number of metering regions that can be used by the auto-white balance (AWB)
2155          routine.
2156          </description>
          <range>Value will be &gt;= 0.
2158          </range>
2159          <details>
          This corresponds to the maximum allowed number of elements in
2161          android.control.awbRegions.
2162          </details>
2163          <hal_details>This entry is private to the framework. Fill in
2164          maxRegions to have this entry be automatically populated.
2165          </hal_details>
2166        </entry>
2167        <entry name="maxRegionsAf" type="int32" visibility="java_public"
2168               synthetic="true" hwlevel="legacy">
2169          <description>
2170          The maximum number of metering regions that can be used by the auto-focus (AF) routine.
2171          </description>
          <range>Value will be &gt;= 0. For FULL-capability devices, this
          value will be &gt;= 1.
2174          </range>
2175          <details>
          This corresponds to the maximum allowed number of elements in
2177          android.control.afRegions.
2178          </details>
2179          <hal_details>This entry is private to the framework. Fill in
2180          maxRegions to have this entry be automatically populated.
2181          </hal_details>
2182        </entry>
2183        <entry name="sceneModeOverrides" type="byte" visibility="system"
2184               container="array" hwlevel="limited">
2185          <array>
2186            <size>3</size>
2187            <size>length(availableSceneModes)</size>
2188          </array>
2189          <description>
2190          Ordered list of auto-exposure, auto-white balance, and auto-focus
2191          settings to use with each available scene mode.
2192          </description>
2193          <range>
2194          For each available scene mode, the list must contain three
2195          entries containing the android.control.aeMode,
2196          android.control.awbMode, and android.control.afMode values used
2197          by the camera device. The entry order is `(aeMode, awbMode, afMode)`
2198          where aeMode has the lowest index position.
2199          </range>
2200          <details>
2201          When a scene mode is enabled, the camera device is expected
2202          to override android.control.aeMode, android.control.awbMode,
2203          and android.control.afMode with its preferred settings for
2204          that scene mode.
2205
2206          The order of this list matches that of availableSceneModes,
2207          with 3 entries for each mode.  The overrides listed
2208          for FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported) are ignored,
          since for those modes the application-set android.control.aeMode,
          android.control.awbMode, and android.control.afMode values are
          used instead, matching the behavior when android.control.mode
          is set to AUTO. It is recommended that the FACE_PRIORITY and
          FACE_PRIORITY_LOW_LIGHT (if supported) overrides be set to 0.
2214
2215          For example, if availableSceneModes contains
2216          `(FACE_PRIORITY, ACTION, NIGHT)`,  then the camera framework
2217          expects sceneModeOverrides to have 9 entries formatted like:
2218          `(0, 0, 0, ON_AUTO_FLASH, AUTO, CONTINUOUS_PICTURE,
2219          ON_AUTO_FLASH, INCANDESCENT, AUTO)`.
2220          </details>
2221          <hal_details>
2222          To maintain backward compatibility, this list will be made available
2223          in the static metadata of the camera service.  The camera service will
2224          use these values to set android.control.aeMode,
2225          android.control.awbMode, and android.control.afMode when using a scene
2226          mode other than FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported).
2227          </hal_details>
2228          <tag id="BC" />
2229        </entry>
2230      </static>
2231      <dynamic>
2232        <entry name="aePrecaptureId" type="int32" visibility="system" deprecated="true">
2233          <description>The ID sent with the latest
2234          CAMERA2_TRIGGER_PRECAPTURE_METERING call</description>
2235          <details>Must be 0 if no
2236          CAMERA2_TRIGGER_PRECAPTURE_METERING trigger received yet
2237          by HAL. Always updated even if AE algorithm ignores the
2238          trigger</details>
2239        </entry>
2240        <clone entry="android.control.aeAntibandingMode" kind="controls">
2241        </clone>
2242        <clone entry="android.control.aeExposureCompensation" kind="controls">
2243        </clone>
2244        <clone entry="android.control.aeLock" kind="controls">
2245        </clone>
2246        <clone entry="android.control.aeMode" kind="controls">
2247        </clone>
2248        <clone entry="android.control.aeRegions" kind="controls">
2249        </clone>
2250        <clone entry="android.control.aeTargetFpsRange" kind="controls">
2251        </clone>
2252        <clone entry="android.control.aePrecaptureTrigger" kind="controls">
2253        </clone>
2254        <entry name="aeState" type="byte" visibility="public" enum="true"
2255               hwlevel="limited">
2256          <enum>
2257            <value>INACTIVE
2258            <notes>AE is off or recently reset.
2259
2260            When a camera device is opened, it starts in
2261            this state. This is a transient state, the camera device may skip reporting
2262            this state in capture result.</notes></value>
2263            <value>SEARCHING
2264            <notes>AE doesn't yet have a good set of control values
2265            for the current scene.
2266
2267            This is a transient state, the camera device may skip
2268            reporting this state in capture result.</notes></value>
2269            <value>CONVERGED
2270            <notes>AE has a good set of control values for the
2271            current scene.</notes></value>
2272            <value>LOCKED
2273            <notes>AE has been locked.</notes></value>
2274            <value>FLASH_REQUIRED
2275            <notes>AE has a good set of control values, but flash
2276            needs to be fired for good quality still
2277            capture.</notes></value>
2278            <value>PRECAPTURE
2279            <notes>AE has been asked to do a precapture sequence
2280            and is currently executing it.
2281
            Precapture can be triggered by setting
            android.control.aePrecaptureTrigger to START. A currently
            active precapture metering sequence, or a completed one (if it caused a camera
            device internal AE lock), can be canceled by setting
            android.control.aePrecaptureTrigger to CANCEL.
2287
2288            Once PRECAPTURE completes, AE will transition to CONVERGED
2289            or FLASH_REQUIRED as appropriate. This is a transient
2290            state, the camera device may skip reporting this state in
2291            capture result.</notes></value>
2292          </enum>
2293          <description>Current state of the auto-exposure (AE) algorithm.</description>
2294          <details>Switching between or enabling AE modes (android.control.aeMode) always
2295          resets the AE state to INACTIVE. Similarly, switching between android.control.mode,
2296          or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
2297          the algorithm states to INACTIVE.
2298
2299          The camera device can do several state transitions between two results, if it is
2300          allowed by the state transition table. For example: INACTIVE may never actually be
2301          seen in a result.
2302
2303          The state in the result is the state for this image (in sync with this image): if
2304          AE state becomes CONVERGED, then the image data associated with this result should
2305          be good to use.
2306
          Below are state transition tables for different AE modes.

          When android.control.aeMode is AE_MODE_OFF:

2309            State       | Transition Cause | New State | Notes
2310          :------------:|:----------------:|:---------:|:-----------------------:
2311          INACTIVE      |                  | INACTIVE  | Camera device auto exposure algorithm is disabled
2312
2313          When android.control.aeMode is AE_MODE_ON_*:
2314
2315            State        | Transition Cause                             | New State      | Notes
2316          :-------------:|:--------------------------------------------:|:--------------:|:-----------------:
2317          INACTIVE       | Camera device initiates AE scan              | SEARCHING      | Values changing
2318          INACTIVE       | android.control.aeLock is ON                 | LOCKED         | Values locked
2319          SEARCHING      | Camera device finishes AE scan               | CONVERGED      | Good values, not changing
2320          SEARCHING      | Camera device finishes AE scan               | FLASH_REQUIRED | Converged but too dark w/o flash
2321          SEARCHING      | android.control.aeLock is ON                 | LOCKED         | Values locked
2322          CONVERGED      | Camera device initiates AE scan              | SEARCHING      | Values changing
2323          CONVERGED      | android.control.aeLock is ON                 | LOCKED         | Values locked
2324          FLASH_REQUIRED | Camera device initiates AE scan              | SEARCHING      | Values changing
2325          FLASH_REQUIRED | android.control.aeLock is ON                 | LOCKED         | Values locked
2326          LOCKED         | android.control.aeLock is OFF                | SEARCHING      | Values not good after unlock
2327          LOCKED         | android.control.aeLock is OFF                | CONVERGED      | Values good after unlock
2328          LOCKED         | android.control.aeLock is OFF                | FLASH_REQUIRED | Exposure good, but too dark
2329          PRECAPTURE     | Sequence done. android.control.aeLock is OFF | CONVERGED      | Ready for high-quality capture
2330          PRECAPTURE     | Sequence done. android.control.aeLock is ON  | LOCKED         | Ready for high-quality capture
2331          LOCKED         | aeLock is ON and aePrecaptureTrigger is START | LOCKED        | Precapture trigger is ignored when AE is already locked
2332          LOCKED         | aeLock is ON and aePrecaptureTrigger is CANCEL| LOCKED        | Precapture trigger is ignored when AE is already locked
2333          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START | PRECAPTURE     | Start AE precapture metering sequence
2334          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL| INACTIVE       | Currently active precapture metering sequence is canceled
2335
2336          For the above table, the camera device may skip reporting any state changes that happen
2337          without application intervention (i.e. mode switch, trigger, locking). Any state that
2338          can be skipped in that manner is called a transient state.
2339
2340          For example, for above AE modes (AE_MODE_ON_*), in addition to the state transitions
2341          listed in above table, it is also legal for the camera device to skip one or more
2342          transient states between two results. See below table for examples:
2343
2344            State        | Transition Cause                                            | New State      | Notes
2345          :-------------:|:-----------------------------------------------------------:|:--------------:|:-----------------:
2346          INACTIVE       | Camera device finished AE scan                              | CONVERGED      | Values are already good, transient states are skipped by camera device.
2347          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.
2348          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | CONVERGED      | Converged after a precapture sequence, transient states are skipped by camera device.
2349          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged    | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.
          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged    | CONVERGED      | Converged after a precapture sequence is canceled, transient states are skipped by camera device.
2351          CONVERGED      | Camera device finished AE scan                              | FLASH_REQUIRED | Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.
2352          FLASH_REQUIRED | Camera device finished AE scan                              | CONVERGED      | Converged after a new scan, transient states are skipped by camera device.
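
          The following non-normative sketch drives the precapture sequence and observes
          these transitions; the names `previewBuilder`, `session`, and `handler` are
          assumptions, and exception handling is omitted:

              // Illustrative sketch only; 'previewBuilder', 'session' and 'handler'
              // are assumed to exist.
              previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                      CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
              session.capture(previewBuilder.build(),
                      new CameraCaptureSession.CaptureCallback() {
                          @Override
                          public void onCaptureCompleted(CameraCaptureSession s,
                                  CaptureRequest request, TotalCaptureResult result) {
                              Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                              // Once AE reports CONVERGED or FLASH_REQUIRED, the precapture
                              // sequence is done and a high-quality still can be issued.
                              if (aeState == null
                                      || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED
                                      || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED) {
                                  // captureStillPicture();  // hypothetical application method
                              }
                          }
                      }, handler);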
2353          </details>
2354        </entry>
2355        <clone entry="android.control.afMode" kind="controls">
2356        </clone>
2357        <clone entry="android.control.afRegions" kind="controls">
2358        </clone>
2359        <clone entry="android.control.afTrigger" kind="controls">
2360        </clone>
2361        <entry name="afState" type="byte" visibility="public" enum="true"
2362               hwlevel="legacy">
2363          <enum>
2364            <value>INACTIVE
2365            <notes>AF is off or has not yet tried to scan/been asked
2366            to scan.
2367
2368            When a camera device is opened, it starts in this
2369            state. This is a transient state, the camera device may
2370            skip reporting this state in capture
2371            result.</notes></value>
2372            <value>PASSIVE_SCAN
            <notes>AF is currently performing an AF scan initiated by the
            camera device in a continuous autofocus mode.
2375
2376            Only used by CONTINUOUS_* AF modes. This is a transient
2377            state, the camera device may skip reporting this state in
2378            capture result.</notes></value>
2379            <value>PASSIVE_FOCUSED
2380            <notes>AF currently believes it is in focus, but may
2381            restart scanning at any time.
2382
2383            Only used by CONTINUOUS_* AF modes. This is a transient
2384            state, the camera device may skip reporting this state in
2385            capture result.</notes></value>
2386            <value>ACTIVE_SCAN
2387            <notes>AF is performing an AF scan because it was
2388            triggered by AF trigger.
2389
2390            Only used by AUTO or MACRO AF modes. This is a transient
2391            state, the camera device may skip reporting this state in
2392            capture result.</notes></value>
2393            <value>FOCUSED_LOCKED
2394            <notes>AF believes it is focused correctly and has locked
2395            focus.
2396
2397            This state is reached only after an explicit START AF trigger has been
2398            sent (android.control.afTrigger), when good focus has been obtained.
2399
2400            The lens will remain stationary until the AF mode (android.control.afMode) is changed or
2401            a new AF trigger is sent to the camera device (android.control.afTrigger).
2402            </notes></value>
2403            <value>NOT_FOCUSED_LOCKED
2404            <notes>AF has failed to focus successfully and has locked
2405            focus.
2406
2407            This state is reached only after an explicit START AF trigger has been
2408            sent (android.control.afTrigger), when good focus cannot be obtained.
2409
2410            The lens will remain stationary until the AF mode (android.control.afMode) is changed or
2411            a new AF trigger is sent to the camera device (android.control.afTrigger).
2412            </notes></value>
2413            <value>PASSIVE_UNFOCUSED
2414            <notes>AF finished a passive scan without finding focus,
2415            and may restart scanning at any time.
2416
2417            Only used by CONTINUOUS_* AF modes. This is a transient state, the camera
2418            device may skip reporting this state in capture result.
2419
2420            LEGACY camera devices do not support this state. When a passive
2421            scan has finished, it will always go to PASSIVE_FOCUSED.
2422            </notes></value>
2423          </enum>
2424          <description>Current state of auto-focus (AF) algorithm.</description>
2425          <details>
2426          Switching between or enabling AF modes (android.control.afMode) always
2427          resets the AF state to INACTIVE. Similarly, switching between android.control.mode,
2428          or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
2429          the algorithm states to INACTIVE.
2430
2431          The camera device can do several state transitions between two results, if it is
2432          allowed by the state transition table. For example: INACTIVE may never actually be
2433          seen in a result.
2434
2435          The state in the result is the state for this image (in sync with this image): if
2436          AF state becomes FOCUSED, then the image data associated with this result should
2437          be sharp.
2438
2439          Below are state transition tables for different AF modes.
2440
2441          When android.control.afMode is AF_MODE_OFF or AF_MODE_EDOF:
2442
2443            State       | Transition Cause | New State | Notes
2444          :------------:|:----------------:|:---------:|:-----------:
2445          INACTIVE      |                  | INACTIVE  | Never changes
2446
2447          When android.control.afMode is AF_MODE_AUTO or AF_MODE_MACRO:
2448
2449            State            | Transition Cause | New State          | Notes
2450          :-----------------:|:----------------:|:------------------:|:--------------:
2451          INACTIVE           | AF_TRIGGER       | ACTIVE_SCAN        | Start AF sweep, Lens now moving
2452          ACTIVE_SCAN        | AF sweep done    | FOCUSED_LOCKED     | Focused, Lens now locked
2453          ACTIVE_SCAN        | AF sweep done    | NOT_FOCUSED_LOCKED | Not focused, Lens now locked
2454          ACTIVE_SCAN        | AF_CANCEL        | INACTIVE           | Cancel/reset AF, Lens now locked
2455          FOCUSED_LOCKED     | AF_CANCEL        | INACTIVE           | Cancel/reset AF
2456          FOCUSED_LOCKED     | AF_TRIGGER       | ACTIVE_SCAN        | Start new sweep, Lens now moving
2457          NOT_FOCUSED_LOCKED | AF_CANCEL        | INACTIVE           | Cancel/reset AF
2458          NOT_FOCUSED_LOCKED | AF_TRIGGER       | ACTIVE_SCAN        | Start new sweep, Lens now moving
2459          Any state          | Mode change      | INACTIVE           |
2460
2461          For the above table, the camera device may skip reporting any state changes that happen
2462          without application intervention (i.e. mode switch, trigger, locking). Any state that
2463          can be skipped in that manner is called a transient state.
2464
2465          For example, for these AF modes (AF_MODE_AUTO and AF_MODE_MACRO), in addition to the
2466          state transitions listed in above table, it is also legal for the camera device to skip
2467          one or more transient states between two results. See below table for examples:
2468
2469            State            | Transition Cause | New State          | Notes
2470          :-----------------:|:----------------:|:------------------:|:--------------:
2471          INACTIVE           | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is already good or good after a scan, lens is now locked.
2472          INACTIVE           | AF_TRIGGER       | NOT_FOCUSED_LOCKED | Focus failed after a scan, lens is now locked.
2473          FOCUSED_LOCKED     | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is already good or good after a scan, lens is now locked.
          NOT_FOCUSED_LOCKED | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is good after a scan, lens is now locked.
2475
2476
2477          When android.control.afMode is AF_MODE_CONTINUOUS_VIDEO:
2478
2479            State            | Transition Cause                    | New State          | Notes
2480          :-----------------:|:-----------------------------------:|:------------------:|:--------------:
2481          INACTIVE           | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
2482          INACTIVE           | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
2483          PASSIVE_SCAN       | Camera device completes current scan| PASSIVE_FOCUSED    | End AF scan, Lens now locked
2484          PASSIVE_SCAN       | Camera device fails current scan    | PASSIVE_UNFOCUSED  | End AF scan, Lens now locked
2485          PASSIVE_SCAN       | AF_TRIGGER                          | FOCUSED_LOCKED     | Immediate transition, if focus is good. Lens now locked
2486          PASSIVE_SCAN       | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | Immediate transition, if focus is bad. Lens now locked
2487          PASSIVE_SCAN       | AF_CANCEL                           | INACTIVE           | Reset lens position, Lens now locked
2488          PASSIVE_FOCUSED    | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
2489          PASSIVE_UNFOCUSED  | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
2490          PASSIVE_FOCUSED    | AF_TRIGGER                          | FOCUSED_LOCKED     | Immediate transition, lens now locked
2491          PASSIVE_UNFOCUSED  | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | Immediate transition, lens now locked
2492          FOCUSED_LOCKED     | AF_TRIGGER                          | FOCUSED_LOCKED     | No effect
2493          FOCUSED_LOCKED     | AF_CANCEL                           | INACTIVE           | Restart AF scan
2494          NOT_FOCUSED_LOCKED | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | No effect
2495          NOT_FOCUSED_LOCKED | AF_CANCEL                           | INACTIVE           | Restart AF scan
2496
2497          When android.control.afMode is AF_MODE_CONTINUOUS_PICTURE:
2498
2499            State            | Transition Cause                     | New State          | Notes
2500          :-----------------:|:------------------------------------:|:------------------:|:--------------:
2501          INACTIVE           | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
2502          INACTIVE           | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
2503          PASSIVE_SCAN       | Camera device completes current scan | PASSIVE_FOCUSED    | End AF scan, Lens now locked
2504          PASSIVE_SCAN       | Camera device fails current scan     | PASSIVE_UNFOCUSED  | End AF scan, Lens now locked
2505          PASSIVE_SCAN       | AF_TRIGGER                           | FOCUSED_LOCKED     | Eventual transition once the focus is good. Lens now locked
2506          PASSIVE_SCAN       | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | Eventual transition if cannot find focus. Lens now locked
2507          PASSIVE_SCAN       | AF_CANCEL                            | INACTIVE           | Reset lens position, Lens now locked
2508          PASSIVE_FOCUSED    | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
2509          PASSIVE_UNFOCUSED  | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
2510          PASSIVE_FOCUSED    | AF_TRIGGER                           | FOCUSED_LOCKED     | Immediate trans. Lens now locked
2511          PASSIVE_UNFOCUSED  | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | Immediate trans. Lens now locked
2512          FOCUSED_LOCKED     | AF_TRIGGER                           | FOCUSED_LOCKED     | No effect
2513          FOCUSED_LOCKED     | AF_CANCEL                            | INACTIVE           | Restart AF scan
2514          NOT_FOCUSED_LOCKED | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | No effect
2515          NOT_FOCUSED_LOCKED | AF_CANCEL                            | INACTIVE           | Restart AF scan
2516
          When switching between AF_MODE_CONTINUOUS_* (CAF modes) and AF_MODE_AUTO/AF_MODE_MACRO
2518          (AUTO modes), the initial INACTIVE or PASSIVE_SCAN states may be skipped by the
2519          camera device. When a trigger is included in a mode switch request, the trigger
2520          will be evaluated in the context of the new mode in the request.
2521          See below table for examples:
2522
2523            State      | Transition Cause                       | New State                                | Notes
2524          :-----------:|:--------------------------------------:|:----------------------------------------:|:--------------:
2525          any state    | CAF-->AUTO mode switch                 | INACTIVE                                 | Mode switch without trigger, initial state must be INACTIVE
2526          any state    | CAF-->AUTO mode switch with AF_TRIGGER | trigger-reachable states from INACTIVE   | Mode switch with trigger, INACTIVE is skipped
2527          any state    | AUTO-->CAF mode switch                 | passively reachable states from INACTIVE | Mode switch without trigger, passive transient state is skipped
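
          The following non-normative sketch shows an active-AF cycle in AF_MODE_AUTO; the
          names `previewBuilder`, `session`, and `handler` are assumptions, and exception
          handling is omitted:

              // Illustrative sketch only; 'previewBuilder', 'session' and 'handler'
              // are assumed to exist.
              previewBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                      CaptureRequest.CONTROL_AF_MODE_AUTO);
              previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                      CaptureRequest.CONTROL_AF_TRIGGER_START);
              session.capture(previewBuilder.build(),
                      new CameraCaptureSession.CaptureCallback() {
                          @Override
                          public void onCaptureCompleted(CameraCaptureSession s,
                                  CaptureRequest request, TotalCaptureResult result) {
                              Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
                              if (afState == null) return;
                              if (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED
                                      || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
                                  // Lens is locked; the application can proceed with its
                                  // still capture.
                              }
                          }
                      }, handler);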
2528          </details>
2529        </entry>
2530        <entry name="afTriggerId" type="int32" visibility="system" deprecated="true">
2531          <description>The ID sent with the latest
2532          CAMERA2_TRIGGER_AUTOFOCUS call</description>
2533          <details>Must be 0 if no CAMERA2_TRIGGER_AUTOFOCUS trigger
2534          received yet by HAL. Always updated even if AF algorithm
2535          ignores the trigger</details>
2536        </entry>
2537        <clone entry="android.control.awbLock" kind="controls">
2538        </clone>
2539        <clone entry="android.control.awbMode" kind="controls">
2540        </clone>
2541        <clone entry="android.control.awbRegions" kind="controls">
2542        </clone>
2543        <clone entry="android.control.captureIntent" kind="controls">
2544        </clone>
2545        <entry name="awbState" type="byte" visibility="public" enum="true"
2546               hwlevel="limited">
2547          <enum>
2548            <value>INACTIVE
2549            <notes>AWB is not in auto mode, or has not yet started metering.
2550
2551            When a camera device is opened, it starts in this
2552            state. This is a transient state, the camera device may
2553            skip reporting this state in capture
2554            result.</notes></value>
2555            <value>SEARCHING
2556            <notes>AWB doesn't yet have a good set of control
2557            values for the current scene.
2558
2559            This is a transient state, the camera device
2560            may skip reporting this state in capture result.</notes></value>
2561            <value>CONVERGED
2562            <notes>AWB has a good set of control values for the
2563            current scene.</notes></value>
2564            <value>LOCKED
2565            <notes>AWB has been locked.
2566            </notes></value>
2567          </enum>
2568          <description>Current state of auto-white balance (AWB) algorithm.</description>
2569          <details>Switching between or enabling AWB modes (android.control.awbMode) always
2570          resets the AWB state to INACTIVE. Similarly, switching between android.control.mode,
2571          or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
2572          the algorithm states to INACTIVE.
2573
2574          The camera device can do several state transitions between two results, if it is
2575          allowed by the state transition table. So INACTIVE may never actually be seen in
2576          a result.
2577
2578          The state in the result is the state for this image (in sync with this image): if
2579          AWB state becomes CONVERGED, then the image data associated with this result should
2580          be good to use.
2581
2582          Below are state transition tables for different AWB modes.
2583
2584          When `android.control.awbMode != AWB_MODE_AUTO`:
2585
2586            State       | Transition Cause | New State | Notes
2587          :------------:|:----------------:|:---------:|:-----------------------:
2588          INACTIVE      |                  |INACTIVE   |Camera device auto white balance algorithm is disabled
2589
2590          When android.control.awbMode is AWB_MODE_AUTO:
2591
2592            State        | Transition Cause                 | New State     | Notes
2593          :-------------:|:--------------------------------:|:-------------:|:-----------------:
2594          INACTIVE       | Camera device initiates AWB scan | SEARCHING     | Values changing
2595          INACTIVE       | android.control.awbLock is ON    | LOCKED        | Values locked
2596          SEARCHING      | Camera device finishes AWB scan  | CONVERGED     | Good values, not changing
2597          SEARCHING      | android.control.awbLock is ON    | LOCKED        | Values locked
2598          CONVERGED      | Camera device initiates AWB scan | SEARCHING     | Values changing
2599          CONVERGED      | android.control.awbLock is ON    | LOCKED        | Values locked
2600          LOCKED         | android.control.awbLock is OFF   | SEARCHING     | Values not good after unlock
2601
2602          For the above table, the camera device may skip reporting any state changes that happen
2603          without application intervention (i.e. mode switch, trigger, locking). Any state that
2604          can be skipped in that manner is called a transient state.
2605
2606          For example, for this AWB mode (AWB_MODE_AUTO), in addition to the state transitions
2607          listed in above table, it is also legal for the camera device to skip one or more
2608          transient states between two results. See below table for examples:
2609
2610            State        | Transition Cause                 | New State     | Notes
2611          :-------------:|:--------------------------------:|:-------------:|:-----------------:
2612          INACTIVE       | Camera device finished AWB scan  | CONVERGED     | Values are already good, transient states are skipped by camera device.
2613          LOCKED         | android.control.awbLock is OFF   | CONVERGED     | Values good after unlock, transient states are skipped by camera device.
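
          The following non-normative sketch locks AWB and waits for the LOCKED state; the
          names `previewBuilder`, `session`, and `handler` are assumptions, and exception
          handling is omitted:

              // Illustrative sketch only; 'previewBuilder', 'session' and 'handler'
              // are assumed to exist.
              previewBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
              session.setRepeatingRequest(previewBuilder.build(),
                      new CameraCaptureSession.CaptureCallback() {
                          @Override
                          public void onCaptureCompleted(CameraCaptureSession s,
                                  CaptureRequest request, TotalCaptureResult result) {
                              Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
                              if (awbState == null) return;
                              if (awbState == CaptureResult.CONTROL_AWB_STATE_LOCKED) {
                                  // White balance values are frozen for subsequent frames.
                              }
                          }
                      }, handler);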
2614          </details>
2615        </entry>
2616        <clone entry="android.control.effectMode" kind="controls">
2617        </clone>
2618        <clone entry="android.control.mode" kind="controls">
2619        </clone>
2620        <clone entry="android.control.sceneMode" kind="controls">
2621        </clone>
2622        <clone entry="android.control.videoStabilizationMode" kind="controls">
2623        </clone>
2624      </dynamic>
2625      <static>
2626        <entry name="availableHighSpeedVideoConfigurations" type="int32" visibility="hidden"
2627               container="array" typedef="highSpeedVideoConfiguration" hwlevel="limited">
2628          <array>
2629            <size>5</size>
2630            <size>n</size>
2631          </array>
2632          <description>
2633          List of available high speed video size, fps range and max batch size configurations
2634          supported by the camera device, in the format of (width, height, fps_min, fps_max, batch_size_max).
2635          </description>
2636          <range>
2637          For each configuration, the fps_max &amp;gt;= 120fps.
2638          </range>
2639          <details>
2640          When CONSTRAINED_HIGH_SPEED_VIDEO is supported in android.request.availableCapabilities,
2641          this metadata will list the supported high speed video size, fps range and max batch size
2642          configurations. All the sizes listed in this configuration will be a subset of the sizes
2643          reported by {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes}
2644          for processed non-stalling formats.
2645
          For the high speed video use case, the application must
          select the video size and fps range from this metadata to configure the recording and
          preview streams and set up the recording requests. For example, if the application intends
          to do high speed recording, it can select the maximum size reported by this metadata to
          configure output streams. Once the size is selected, the application can filter this metadata
          by the selected size and get the supported fps ranges, and use these fps ranges to set up the
          recording requests. Note that for the use case of multiple output streams, the application
          must select one unique size from this metadata to use (e.g., preview and recording streams
          must have the same size). Otherwise, the high speed capture session creation will fail.
2655
          The min and max fps will be multiples of 30fps.
2657
          High speed video streaming places significant performance pressure on camera hardware;
          to achieve efficient high speed streaming, the camera device may have to aggregate
          multiple frames together and process them as a batch, where the request
          controls are the same for all the frames in the batch. Max batch size indicates
2662          the max possible number of frames the camera device will group together for this high
2663          speed stream configuration. This max batch size will be used to generate a high speed
2664          recording request list by
2665          {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
2666          The max batch size for each configuration will satisfy below conditions:
2667
2668          * Each max batch size will be a divisor of its corresponding fps_max / 30. For example,
2669          if max_fps is 300, max batch size will only be 1, 2, 5, or 10.
          * The camera device may choose a smaller internal batch size for each configuration, but
2671          the actual batch size will be a divisor of max batch size. For example, if the max batch
2672          size is 8, the actual batch size used by camera device will only be 1, 2, 4, or 8.
2673          * The max batch size in each configuration entry must be no larger than 32.
2674
          The camera device doesn't have to support batch mode to achieve high speed video recording;
          in that case, batch_size_max will be reported as 1 in each configuration entry.
2677
          The fps ranges in this configuration list can only be used to create requests
          that are submitted to a high speed camera capture session created by
          {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}.
          The fps ranges reported in this metadata must not be used to set up capture requests for
          a normal capture session, or it will cause a request error.
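
          The following non-normative sketch shows one way an application might consume this
          metadata; the names `characteristics`, `cameraDevice`, and `highSpeedSession` (a
          CameraConstrainedHighSpeedCaptureSession) are assumptions, and exception handling
          is omitted:

              // Illustrative sketch only; 'characteristics', 'cameraDevice' and
              // 'highSpeedSession' are assumed to exist.
              StreamConfigurationMap map =
                      characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
              Size videoSize = map.getHighSpeedVideoSizes()[0];   // pick one supported size
              CaptureRequest.Builder builder =
                      cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
              builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
                      map.getHighSpeedVideoFpsRangesFor(videoSize)[0]);
              // builder.addTarget(...) is called here for the preview and recording
              // surfaces, which must use the same size.
              highSpeedSession.setRepeatingBurst(
                      highSpeedSession.createHighSpeedRequestList(builder.build()),
                      null, null);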
2683          </details>
2684          <hal_details>
2685          All the sizes listed in this configuration will be a subset of the sizes reported by
2686          android.scaler.availableStreamConfigurations for processed non-stalling output formats.
2687          Note that for all high speed video configurations, HAL must be able to support a minimum
2688          of two streams, though the application might choose to configure just one stream.
2689
          The HAL may support multiple sensor modes for high speed outputs, for example, a 120fps
          sensor mode for 120fps recording and a 240fps sensor mode for 240fps recording. The application
          usually starts preview first, then starts recording. To minimize the stutter caused by a
          sensor mode switch when recording starts, the application may want to ensure
          the same sensor mode is used for preview and recording. Therefore, the HAL must advertise
2695          the variable fps range [30, fps_max] for each fixed fps range in this configuration list.
2696          For example, if the HAL advertises [120, 120] and [240, 240], the HAL must also advertise
2697          [30, 120] and [30, 240] for each configuration. In doing so, if the application intends to
2698          do 120fps recording, it can select [30, 120] to start preview, and [120, 120] to start
2699          recording. For these variable fps ranges, it's up to the HAL to decide the actual fps
2700          values that are suitable for smooth preview streaming. If the HAL sees different max_fps
2701          values that fall into different sensor modes in a sequence of requests, the HAL must
          switch the sensor mode as quickly as possible to minimize the stutter caused by the mode switch.
2703          </hal_details>
2704          <tag id="V1" />
2705        </entry>
2706        <entry name="aeLockAvailable" type="byte" visibility="public" enum="true"
2707               typedef="boolean" hwlevel="legacy">
2708          <enum>
2709            <value>FALSE</value>
2710            <value>TRUE</value>
2711          </enum>
2712          <description>Whether the camera device supports android.control.aeLock</description>
2713          <details>
2714              Devices with MANUAL_SENSOR capability or BURST_CAPTURE capability will always
2715              list `true`. This includes FULL devices.
2716          </details>
2717          <tag id="BC"/>
2718        </entry>
2719        <entry name="awbLockAvailable" type="byte" visibility="public" enum="true"
2720               typedef="boolean" hwlevel="legacy">
2721          <enum>
2722            <value>FALSE</value>
2723            <value>TRUE</value>
2724          </enum>
2725          <description>Whether the camera device supports android.control.awbLock</description>
2726          <details>
2727              Devices with MANUAL_POST_PROCESSING capability or BURST_CAPTURE capability will
2728              always list `true`. This includes FULL devices.
2729          </details>
2730          <tag id="BC"/>
2731        </entry>
2732        <entry name="availableModes" type="byte" visibility="public"
2733            type_notes="List of enums (android.control.mode)." container="array"
2734            typedef="enumList" hwlevel="legacy">
2735          <array>
2736            <size>n</size>
2737          </array>
2738          <description>
2739          List of control modes for android.control.mode that are supported by this camera
2740          device.
2741          </description>
2742          <range>Any value listed in android.control.mode</range>
2743          <details>
2744              This list contains control modes that can be set for the camera device.
2745              LEGACY mode devices will always support AUTO mode. LIMITED and FULL
              devices will always support OFF and AUTO modes.
2747          </details>
2748        </entry>
2749        <entry name="postRawSensitivityBoostRange" type="int32" visibility="public"
2750            type_notes="Range of supported post RAW sensitivitiy boosts"
2751            container="array" typedef="rangeInt">
2752          <array>
2753            <size>2</size>
2754          </array>
2755          <description>Range of boosts for android.control.postRawSensitivityBoost supported
2756            by this camera device.
2757          </description>
2758          <units>ISO arithmetic units, the same as android.sensor.sensitivity</units>
2759          <details>
            Devices that support post RAW sensitivity boost will advertise the
            android.control.postRawSensitivityBoost key for controlling
            post RAW sensitivity boost.

            This key will be `null` for devices that do not support any RAW format
            outputs. For devices that do support RAW format outputs, this key will always
            be present, and if a device does not support post RAW sensitivity boost, it will
            list `(100, 100)` in this key.
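
            The following non-normative sketch shows how an application might interpret
            this key; `characteristics` is an assumption:

                // Illustrative sketch only; 'characteristics' is assumed to exist.
                Range&lt;Integer&gt; boostRange = characteristics.get(
                        CameraCharacteristics.CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE);
                if (boostRange == null) {
                    // No RAW output support; the boost control is not applicable.
                } else if (boostRange.equals(Range.create(100, 100))) {
                    // RAW is supported, but only the default boost of 100 is available.
                }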
2768          </details>
2769          <hal_details>
2770             This key is added in HAL3.4. For HAL3.3 or earlier devices, camera framework will
2771             generate this key as `(100, 100)` if device supports any of RAW output formats.
2772             All HAL3.4 and above devices should list this key if device supports any of RAW
2773             output formats.
2774          </hal_details>
2775        </entry>
2776      </static>
2777      <controls>
2778        <entry name="postRawSensitivityBoost" type="int32" visibility="public">
2779          <description>The amount of additional sensitivity boost applied to output images
2780             after RAW sensor data is captured.
2781          </description>
2782          <units>ISO arithmetic units, the same as android.sensor.sensitivity</units>
2783          <range>android.control.postRawSensitivityBoostRange</range>
2784          <details>
2785          Some camera devices support additional digital sensitivity boosting in the
2786          camera processing pipeline after sensor RAW image is captured.
          Such a boost will be applied to YUV/JPEG format output images but will have
          no effect on RAW output formats like RAW_SENSOR, RAW10, RAW12 or RAW_OPAQUE.
2789
          This key will be `null` for devices that do not support any RAW format
          outputs. For devices that do support RAW format outputs, this key will always
          be present, and if a device does not support post RAW sensitivity boost, it will
2793          list `100` in this key.
2794
2795          If the camera device cannot apply the exact boost requested, it will reduce the
2796          boost to the nearest supported value.
2797          The final boost value used will be available in the output capture result.
2798
          For devices that support post RAW sensitivity boost, the YUV/JPEG output images
          of such a device will have a total sensitivity of
          `android.sensor.sensitivity * android.control.postRawSensitivityBoost / 100`.
          The sensitivity of RAW format images will always be `android.sensor.sensitivity`.
2803
2804          This control is only effective if android.control.aeMode or android.control.mode is set to
2805          OFF; otherwise the auto-exposure algorithm will override this value.
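
          The following non-normative sketch shows a manual-exposure request using this
          control; `requestBuilder` is an assumption, and android.control.aeMode is assumed
          to be OFF:

              // Illustrative sketch only; 'requestBuilder' is assumed and AE is OFF.
              requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 800);
              requestBuilder.set(CaptureRequest.CONTROL_POST_RAW_SENSITIVITY_BOOST, 200);
              // YUV/JPEG outputs then have an effective sensitivity of
              // 800 * 200 / 100 = 1600, while RAW outputs remain at 800.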
2806          </details>
2807        </entry>
2808      </controls>
2809      <dynamic>
2810        <clone entry="android.control.postRawSensitivityBoost" kind="controls">
2811        </clone>
2812      </dynamic>
2813      <controls>
2814        <entry name="enableZsl" type="byte" visibility="public" enum="true" typedef="boolean">
2815          <enum>
2816            <value>FALSE
2817            <notes>Requests with android.control.captureIntent == STILL_CAPTURE must be captured
2818              after previous requests.</notes></value>
2819            <value>TRUE
2820            <notes>Requests with android.control.captureIntent == STILL_CAPTURE may or may not be
2821              captured before previous requests.</notes></value>
2822          </enum>
2823          <description>Allow camera device to enable zero-shutter-lag mode for requests with
2824            android.control.captureIntent == STILL_CAPTURE.
2825          </description>
2826          <details>
2827          If enableZsl is `true`, the camera device may enable zero-shutter-lag mode for requests with
2828          STILL_CAPTURE capture intent. The camera device may use images captured in the past to
2829          produce output images for a zero-shutter-lag request. The result metadata including the
2830          android.sensor.timestamp reflects the source frames used to produce output images.
2831          Therefore, the contents of the output images and the result metadata may be out of order
2832          compared to previous regular requests. enableZsl does not affect requests with other
2833          capture intents.
2834
2835          For example, when requests are submitted in the following order:
2836            Request A: enableZsl is `true`, android.control.captureIntent is PREVIEW
2837            Request B: enableZsl is `true`, android.control.captureIntent is STILL_CAPTURE
2838
2839          The output images for request B may have contents captured before the output images for
2840          request A, and the result metadata for request B may be older than the result metadata for
2841          request A.
2842
2843          Note that when enableZsl is `true`, it is not guaranteed to get output images captured in the
2844          past for requests with STILL_CAPTURE capture intent.
2845
2846          For applications targeting SDK versions O and newer, the value of enableZsl in
2847          TEMPLATE_STILL_CAPTURE template may be `true`. The value in other templates is always
2848          `false` if present.
2849
2850          For applications targeting SDK versions older than O, the value of enableZsl in all
2851          capture templates is always `false` if present.
2852
2853          For application-operated ZSL, use CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
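
          The following non-normative sketch requests ZSL for a still capture;
          `cameraDevice` is an assumption, and exception handling is omitted:

              // Illustrative sketch only; 'cameraDevice' is assumed to exist. This only
              // takes effect for applications targeting SDK O or newer.
              CaptureRequest.Builder stillBuilder =
                      cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
              stillBuilder.set(CaptureRequest.CONTROL_ENABLE_ZSL, true);
              // The produced image may come from frames captured before this request was
              // submitted; use android.sensor.timestamp from the result to order outputs.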
2854          </details>
2855          <hal_details>
2856          It is valid for HAL to produce regular output images for requests with STILL_CAPTURE
2857          capture intent.
2858          </hal_details>
2859        </entry>
2860      </controls>
2861      <dynamic>
2862        <clone entry="android.control.enableZsl" kind="controls">
2863        </clone>
2864      </dynamic>
2865    </section>
2866    <section name="demosaic">
2867      <controls>
2868        <entry name="mode" type="byte" enum="true">
2869          <enum>
2870            <value>FAST
2871            <notes>Minimal or no slowdown of frame rate compared to
2872            Bayer RAW output.</notes></value>
2873            <value>HIGH_QUALITY
2874            <notes>Improved processing quality but the frame rate might be slowed down
2875            relative to raw output.</notes></value>
2876          </enum>
2877          <description>Controls the quality of the demosaicing
2878          processing.</description>
2879          <tag id="FUTURE" />
2880        </entry>
2881      </controls>
2882    </section>
2883    <section name="edge">
2884      <controls>
2885        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
2886          <enum>
2887            <value>OFF
2888            <notes>No edge enhancement is applied.</notes></value>
2889            <value>FAST
2890            <notes>Apply edge enhancement at a quality level that does not slow down frame rate
2891            relative to sensor output. It may be the same as OFF if edge enhancement will
2892            slow down frame rate relative to sensor.</notes></value>
2893            <value>HIGH_QUALITY
2894            <notes>Apply high-quality edge enhancement, at a cost of possibly reduced output frame rate.
2895            </notes></value>
2896            <value optional="true">ZERO_SHUTTER_LAG
2897            <notes>Edge enhancement is applied at different levels for different output streams,
2898            based on resolution. Streams at maximum recording resolution (see {@link
2899            ACameraDevice_createCaptureSession}) or below have
2900            edge enhancement applied, while higher-resolution streams have no edge enhancement
2901            applied. The level of edge enhancement for low-resolution streams is tuned so that
2902            frame rate is not impacted, and the quality is equal to or better than FAST (since it
2903            is only applied to lower-resolution outputs, quality may improve from FAST).
2904
2905            This mode is intended to be used by applications operating in a zero-shutter-lag mode
2906            with YUV or PRIVATE reprocessing, where the application continuously captures
2907            high-resolution intermediate buffers into a circular buffer, from which a final image is
2908            produced via reprocessing when a user takes a picture.  For such a use case, the
2909            high-resolution buffers must not have edge enhancement applied to maximize efficiency of
2910            preview and to avoid double-applying enhancement when reprocessed, while low-resolution
2911            buffers (used for recording or preview, generally) need edge enhancement applied for
2912            reasonable preview quality.
2913
2914            This mode is guaranteed to be supported by devices that support either the
2915            YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
2916            (android.request.availableCapabilities lists either of those capabilities) and it will
2917            be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
2918            </notes></value>
2919          </enum>
2920          <description>Operation mode for edge
2921          enhancement.</description>
2922          <range>android.edge.availableEdgeModes</range>
2923          <details>Edge enhancement improves sharpness and details in the captured image. OFF means
2924          no enhancement will be applied by the camera device.
2925
2926          FAST/HIGH_QUALITY both mean camera device determined enhancement
2927          will be applied. HIGH_QUALITY mode indicates that the
2928          camera device will use the highest-quality enhancement algorithms,
2929          even if it slows down capture rate. FAST means the camera device will
2930          not slow down capture rate when applying edge enhancement. FAST may be the same as OFF if
2931          edge enhancement will slow down capture rate. Every output stream will have a similar
2932          amount of enhancement applied.
2933
2934          ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
2935          buffer of high-resolution images during preview and reprocess image(s) from that buffer
2936          into a final capture when triggered by the user. In this mode, the camera device applies
2937          edge enhancement to low-resolution streams (below maximum recording resolution) to
2938          maximize preview quality, but does not apply edge enhancement to high-resolution streams,
2939          since those will be reprocessed later if necessary.
2940
          For YUV_REPROCESSING, the FAST and HIGH_QUALITY modes mean that the camera
          device will apply FAST or HIGH_QUALITY YUV-domain edge enhancement, respectively.
2943          The camera device may adjust its internal edge enhancement parameters for best
2944          image quality based on the android.reprocess.effectiveExposureFactor, if it is set.
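
          The following non-normative sketch selects an edge mode from the advertised list;
          `characteristics` and `requestBuilder` are assumptions:

              // Illustrative sketch only; 'characteristics' and 'requestBuilder' are
              // assumed to exist.
              int[] edgeModes = characteristics.get(CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES);
              int chosen = CameraMetadata.EDGE_MODE_FAST;   // FAST is always listed
              for (int mode : edgeModes) {
                  if (mode == CameraMetadata.EDGE_MODE_HIGH_QUALITY) {
                      chosen = mode;                        // prefer HIGH_QUALITY if offered
                  }
              }
              requestBuilder.set(CaptureRequest.EDGE_MODE, chosen);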
2945          </details>
2946          <hal_details>
          For YUV_REPROCESSING, the HAL can use android.reprocess.effectiveExposureFactor to
          adjust the internal edge enhancement parameters appropriately to get the best
2949          quality images.
2950          </hal_details>
2951          <tag id="V1" />
2952          <tag id="REPROC" />
2953        </entry>
2954        <entry name="strength" type="byte">
2955          <description>Control the amount of edge enhancement
2956          applied to the images</description>
2957          <units>1-10; 10 is maximum sharpening</units>
2958          <tag id="FUTURE" />
2959        </entry>
2960      </controls>
2961      <static>
2962        <entry name="availableEdgeModes" type="byte" visibility="public"
2963               type_notes="list of enums" container="array" typedef="enumList"
2964               hwlevel="full">
2965          <array>
2966            <size>n</size>
2967          </array>
2968          <description>
2969          List of edge enhancement modes for android.edge.mode that are supported by this camera
2970          device.
2971          </description>
2972          <range>Any value listed in android.edge.mode</range>
2973          <details>
2974          Full-capability camera devices must always support OFF; camera devices that support
2975          YUV_REPROCESSING or PRIVATE_REPROCESSING will list ZERO_SHUTTER_LAG; all devices will
2976          list FAST.
2977          </details>
2978          <hal_details>
2979          HAL must support both FAST and HIGH_QUALITY if edge enhancement control is available
2980          on the camera device, but the underlying implementation can be the same for both modes.
2981          That is, if the highest quality implementation on the camera device does not slow down
2982          capture rate, then FAST and HIGH_QUALITY will generate the same output.
2983          </hal_details>
2984          <tag id="V1" />
2985          <tag id="REPROC" />
2986        </entry>
2987      </static>
2988      <dynamic>
2989        <clone entry="android.edge.mode" kind="controls">
2990          <tag id="V1" />
2991          <tag id="REPROC" />
2992        </clone>
2993      </dynamic>
2994    </section>
2995    <section name="flash">
2996      <controls>
2997        <entry name="firingPower" type="byte">
2998          <description>Power for flash firing/torch</description>
2999          <units>10 is max power; 0 is no flash. Linear</units>
3000          <range>0 - 10</range>
3001          <details>Power for snapshot may use a different scale than
3002          for torch mode. Only one entry for torch mode will be
3003          used</details>
3004          <tag id="FUTURE" />
3005        </entry>
3006        <entry name="firingTime" type="int64">
3007          <description>Firing time of flash relative to start of
3008          exposure</description>
3009          <units>nanoseconds</units>
3010          <range>0-(exposure time-flash duration)</range>
3011          <details>Clamped to (0, exposure time - flash
3012          duration).</details>
3013          <tag id="FUTURE" />
3014        </entry>
3015        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="legacy">
3016          <enum>
3017            <value>OFF
3018              <notes>
3019              Do not fire the flash for this capture.
3020              </notes>
3021            </value>
3022            <value>SINGLE
3023              <notes>
3024              If the flash is available and charged, fire flash
3025              for this capture.
3026              </notes>
3027            </value>
3028            <value>TORCH
3029              <notes>
3030              Transition flash to continuously on.
3031              </notes>
3032            </value>
3033          </enum>
          <description>The desired mode for the camera device's flash control.</description>
3035          <details>
          This control is only effective when the flash unit is available
3037          (`android.flash.info.available == true`).
3038
3039          When this control is used, the android.control.aeMode must be set to ON or OFF.
3040          Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH,
3041          ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control.
3042
3043          When set to OFF, the camera device will not fire flash for this capture.
3044
3045          When set to SINGLE, the camera device will fire flash regardless of the camera
          device's auto-exposure routine's result. When used in the still capture case, this
3047          control should be used along with auto-exposure (AE) precapture metering sequence
3048          (android.control.aePrecaptureTrigger), otherwise, the image may be incorrectly exposed.
3049
3050          When set to TORCH, the flash will be on continuously. This mode can be used
3051          for use cases such as preview, auto-focus assist, still capture, or video recording.
3052
3053          The flash status will be reported by android.flash.state in the capture result metadata.
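
          The following non-normative sketch turns on torch mode when a flash unit is
          present; `characteristics` and `requestBuilder` are assumptions:

              // Illustrative sketch only; 'characteristics' and 'requestBuilder' are
              // assumed to exist.
              Boolean hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
              if (Boolean.TRUE.equals(hasFlash)) {
                  // AE must be ON or OFF so the AE flash modes do not override this control.
                  requestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                          CaptureRequest.CONTROL_AE_MODE_ON);
                  requestBuilder.set(CaptureRequest.FLASH_MODE,
                          CaptureRequest.FLASH_MODE_TORCH);
              }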
3054          </details>
3055          <tag id="BC" />
3056        </entry>
3057      </controls>
3058      <static>
3059        <namespace name="info">
3060          <entry name="available" type="byte" visibility="public" enum="true"
3061                 typedef="boolean" hwlevel="legacy">
3062            <enum>
3063              <value>FALSE</value>
3064              <value>TRUE</value>
3065            </enum>
3066            <description>Whether this camera device has a
3067            flash unit.</description>
3068            <details>
3069            Will be `false` if no flash is available.
3070
3071            If there is no flash unit, none of the flash controls do
3072            anything.</details>
3073            <tag id="BC" />
3074          </entry>
3075          <entry name="chargeDuration" type="int64">
3076            <description>Time taken before flash can fire
3077            again</description>
3078            <units>nanoseconds</units>
3079            <range>0-1e9</range>
3080            <details>1 second too long/too short for recharge? Should
3081            this be power-dependent?</details>
3082            <tag id="FUTURE" />
3083          </entry>
3084        </namespace>
3085        <entry name="colorTemperature" type="byte">
3086          <description>The x,y whitepoint of the
3087          flash</description>
3088          <units>pair of floats</units>
3089          <range>0-1 for both</range>
3090          <tag id="FUTURE" />
3091        </entry>
3092        <entry name="maxEnergy" type="byte">
3093          <description>Max energy output of the flash for a full
3094          power single flash</description>
3095          <units>lumen-seconds</units>
3096          <range>&amp;gt;= 0</range>
3097          <tag id="FUTURE" />
3098        </entry>
3099      </static>
3100      <dynamic>
3101        <clone entry="android.flash.firingPower" kind="controls">
3102        </clone>
3103        <clone entry="android.flash.firingTime" kind="controls">
3104        </clone>
3105        <clone entry="android.flash.mode" kind="controls"></clone>
3106        <entry name="state" type="byte" visibility="public" enum="true"
3107               hwlevel="limited">
3108          <enum>
3109            <value>UNAVAILABLE
3110            <notes>No flash on camera.</notes></value>
3111            <value>CHARGING
3112            <notes>Flash is charging and cannot be fired.</notes></value>
3113            <value>READY
3114            <notes>Flash is ready to fire.</notes></value>
3115            <value>FIRED
3116            <notes>Flash fired for this capture.</notes></value>
3117            <value>PARTIAL
3118            <notes>Flash partially illuminated this frame.
3119
3120            This is usually due to the next or previous frame having
3121            the flash fire, and the flash spilling into this capture
3122            due to hardware limitations.</notes></value>
3123          </enum>
3124          <description>Current state of the flash
3125          unit.</description>
3126          <details>
          When the camera device doesn't have a flash unit
3128          (i.e. `android.flash.info.available == false`), this state will always be UNAVAILABLE.
3129          Other states indicate the current flash status.
3130
3131          In certain conditions, this will be available on LEGACY devices:
3132
3133           * Flash-less cameras always return UNAVAILABLE.
3134           * Using android.control.aeMode `==` ON_ALWAYS_FLASH
3135             will always return FIRED.
3136           * Using android.flash.mode `==` TORCH
3137             will always return FIRED.
3138
3139          In all other conditions the state will not be available on
3140          LEGACY devices (i.e. it will be `null`).
3141          </details>
3142        </entry>
3143      </dynamic>
3144    </section>
3145    <section name="hotPixel">
3146      <controls>
3147        <entry name="mode" type="byte" visibility="public" enum="true">
3148          <enum>
3149            <value>OFF
3150              <notes>
3151              No hot pixel correction is applied.
3152
3153              The frame rate must not be reduced relative to sensor raw output
3154              for this option.
3155
3156              The hotpixel map may be returned in android.statistics.hotPixelMap.
3157              </notes>
3158            </value>
3159            <value>FAST
3160              <notes>
3161              Hot pixel correction is applied, without reducing frame
3162              rate relative to sensor raw output.
3163
3164              The hotpixel map may be returned in android.statistics.hotPixelMap.
3165              </notes>
3166            </value>
3167            <value>HIGH_QUALITY
3168              <notes>
3169              High-quality hot pixel correction is applied, at a cost
3170              of possibly reduced frame rate relative to sensor raw output.
3171
3172              The hotpixel map may be returned in android.statistics.hotPixelMap.
3173              </notes>
3174            </value>
3175          </enum>
3176          <description>
3177          Operational mode for hot pixel correction.
3178          </description>
3179          <range>android.hotPixel.availableHotPixelModes</range>
3180          <details>
3181          Hotpixel correction interpolates out, or otherwise removes, pixels
3182          that do not accurately measure the incoming light (i.e. pixels that
3183          are stuck at an arbitrary value or are oversensitive).
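
          A minimal Java sketch (assuming `characteristics` is this device's
          CameraCharacteristics and `builder` is a CaptureRequest.Builder) of enabling
          hot pixel correction only when FAST is advertised:

              int[] modes = characteristics.get(
                      CameraCharacteristics.HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES);
              for (int mode : modes) {
                  if (mode == CameraMetadata.HOT_PIXEL_MODE_FAST) {
                      // FAST never reduces frame rate relative to sensor raw output.
                      builder.set(CaptureRequest.HOT_PIXEL_MODE, mode);
                  }
              }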
3184          </details>
3185          <tag id="V1" />
3186          <tag id="RAW" />
3187        </entry>
3188      </controls>
3189      <static>
3190        <entry name="availableHotPixelModes" type="byte" visibility="public"
3191          type_notes="list of enums" container="array" typedef="enumList">
3192          <array>
3193            <size>n</size>
3194          </array>
3195          <description>
3196          List of hot pixel correction modes for android.hotPixel.mode that are supported by this
3197          camera device.
3198          </description>
3199          <range>Any value listed in android.hotPixel.mode</range>
3200          <details>
3201          FULL mode camera devices will always support FAST.
3202          </details>
3203          <hal_details>
3204          To avoid performance issues, there will be significantly fewer hot
3205          pixels than actual pixels on the camera sensor.
3206          HAL must support both FAST and HIGH_QUALITY if hot pixel correction control is available
3207          on the camera device, but the underlying implementation can be the same for both modes.
3208          That is, if the highest quality implementation on the camera device does not slow down
3209          capture rate, then FAST and HIGH_QUALITY will generate the same output.
3210          </hal_details>
3211          <tag id="V1" />
3212          <tag id="RAW" />
3213        </entry>
3214      </static>
3215      <dynamic>
3216        <clone entry="android.hotPixel.mode" kind="controls">
3217          <tag id="V1" />
3218          <tag id="RAW" />
3219        </clone>
3220      </dynamic>
3221    </section>
3222    <section name="jpeg">
3223      <controls>
3224        <entry name="gpsLocation" type="byte" visibility="java_public" synthetic="true"
3225        typedef="location" hwlevel="legacy">
3226          <description>
3227          A location object to use when generating image GPS metadata.
3228          </description>
3229          <details>
3230          Setting a location object in a request will include the GPS coordinates of the location
3231          into any JPEG images captured based on the request. These coordinates can then be
3232          viewed by anyone who receives the JPEG image.
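
          A minimal Java sketch (assuming `builder` is a CaptureRequest.Builder for a JPEG
          capture and `location` is an android.location.Location obtained by the application):

              // The fix is written into the EXIF GPS tags of the resulting JPEG.
              builder.set(CaptureRequest.JPEG_GPS_LOCATION, location);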
3233          </details>
3234        </entry>
3235        <entry name="gpsCoordinates" type="double" visibility="ndk_public"
3236        type_notes="latitude, longitude, altitude. First two in degrees, the third in meters"
3237        container="array" hwlevel="legacy">
3238          <array>
3239            <size>3</size>
3240          </array>
3241          <description>GPS coordinates to include in output JPEG
3242          EXIF.</description>
3243          <range>(-180 - 180], [-90,90], [-inf, inf]</range>
3244          <tag id="BC" />
3245        </entry>
3246        <entry name="gpsProcessingMethod" type="byte" visibility="ndk_public"
3247               typedef="string" hwlevel="legacy">
3248          <description>32 characters describing GPS algorithm to
3249          include in EXIF.</description>
3250          <units>UTF-8 null-terminated string</units>
3251          <tag id="BC" />
3252        </entry>
3253        <entry name="gpsTimestamp" type="int64" visibility="ndk_public" hwlevel="legacy">
3254          <description>Time GPS fix was made to include in
3255          EXIF.</description>
3256          <units>UTC in seconds since January 1, 1970</units>
3257          <tag id="BC" />
3258        </entry>
3259        <entry name="orientation" type="int32" visibility="public" hwlevel="legacy">
3260          <description>The orientation for a JPEG image.</description>
3261          <units>Degrees in multiples of 90</units>
3262          <range>0, 90, 180, 270</range>
3263          <details>
          The clockwise rotation angle in degrees, relative to the orientation
          of the camera, that the JPEG picture needs to be rotated by to be viewed
          upright.
3267
3268          Camera devices may either encode this value into the JPEG EXIF header, or
3269          rotate the image data to match this orientation. When the image data is rotated,
3270          the thumbnail data will also be rotated.
3271
3272          Note that this orientation is relative to the orientation of the camera sensor, given
3273          by android.sensor.orientation.
3274
3275          To translate from the device orientation given by the Android sensor APIs, the following
3276          sample code may be used:
3277
3278              private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
3279                  if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
3280                  int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
3281
3282                  // Round device orientation to a multiple of 90
3283                  deviceOrientation = (deviceOrientation + 45) / 90 * 90;
3284
3285                  // Reverse device orientation for front-facing cameras
3286                  boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
3287                  if (facingFront) deviceOrientation = -deviceOrientation;
3288
3289                  // Calculate desired JPEG orientation relative to camera orientation to make
3290                  // the image upright relative to the device orientation
3291                  int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
3292
3293                  return jpegOrientation;
3294              }
3295          </details>
3296          <tag id="BC" />
3297        </entry>
3298        <entry name="quality" type="byte" visibility="public" hwlevel="legacy">
3299          <description>Compression quality of the final JPEG
3300          image.</description>
3301          <range>1-100; larger is higher quality</range>
3302          <details>85-95 is typical usage range.</details>
3303          <tag id="BC" />
3304        </entry>
3305        <entry name="thumbnailQuality" type="byte" visibility="public" hwlevel="legacy">
3306          <description>Compression quality of JPEG
3307          thumbnail.</description>
3308          <range>1-100; larger is higher quality</range>
3309          <tag id="BC" />
3310        </entry>
3311        <entry name="thumbnailSize" type="int32" visibility="public"
3312        container="array" typedef="size" hwlevel="legacy">
3313          <array>
3314            <size>2</size>
3315          </array>
3316          <description>Resolution of embedded JPEG thumbnail.</description>
3317          <range>android.jpeg.availableThumbnailSizes</range>
          <details>When set to (0, 0), the JPEG EXIF will not contain a thumbnail,
          but the captured JPEG will still be a valid image.
3320
3321          For best results, when issuing a request for a JPEG image, the thumbnail size selected
3322          should have the same aspect ratio as the main JPEG output.
3323
3324          If the thumbnail image aspect ratio differs from the JPEG primary image aspect
3325          ratio, the camera device creates the thumbnail by cropping it from the primary image.
          For example, if the primary image has a 4:3 aspect ratio and the thumbnail image has a
          16:9 aspect ratio, the primary image will be cropped vertically (letterboxed) to
3328          generate the thumbnail image. The thumbnail image will always have a smaller Field
3329          Of View (FOV) than the primary image when aspect ratios differ.
3330
3331          When an android.jpeg.orientation of non-zero degree is requested,
3332          the camera device will handle thumbnail rotation in one of the following ways:
3333
3334          * Set the
3335            [EXIF orientation flag](https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION)
3336            and keep jpeg and thumbnail image data unrotated.
3337          * Rotate the jpeg and thumbnail image data and not set
3338            [EXIF orientation flag](https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION).
            In this case, LIMITED or FULL hardware level devices will report rotated thumbnail size
            in capture result, so the width and height will be interchanged if 90 or 270 degree
            orientation is requested. LEGACY devices will always report unrotated thumbnail size.
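
          A minimal Java sketch (assuming `characteristics` is this device's
          CameraCharacteristics, `jpegSize` is the chosen JPEG output size, and `builder`
          is a CaptureRequest.Builder) of selecting a thumbnail size with a matching
          aspect ratio:

              Size[] sizes = characteristics.get(
                      CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES);
              Size thumbnailSize = new Size(0, 0); // fall back to no thumbnail
              for (Size s : sizes) {
                  // Same aspect ratio when the cross-multiplied dimensions are equal.
                  if (s.getWidth() * jpegSize.getHeight()
                          == s.getHeight() * jpegSize.getWidth()) {
                      thumbnailSize = s; // the list is sorted by area, so keep the largest match
                  }
              }
              builder.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, thumbnailSize);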
3342          </details>
3343          <hal_details>
3344          The HAL must not squeeze or stretch the downscaled primary image to generate thumbnail.
3345          The cropping must be done on the primary jpeg image rather than the sensor active array.
3346          The stream cropping rule specified by "S5. Cropping" in camera3.h doesn't apply to the
3347          thumbnail image cropping.
3348          </hal_details>
3349          <tag id="BC" />
3350        </entry>
3351      </controls>
3352      <static>
3353        <entry name="availableThumbnailSizes" type="int32" visibility="public"
3354        container="array" typedef="size" hwlevel="legacy">
3355          <array>
3356            <size>2</size>
3357            <size>n</size>
3358          </array>
3359          <description>List of JPEG thumbnail sizes for android.jpeg.thumbnailSize supported by this
3360          camera device.</description>
3361          <details>
3362          This list will include at least one non-zero resolution, plus `(0,0)` for indicating no
3363          thumbnail should be generated.
3364
          The following conditions will be satisfied for this size list:
3366
3367          * The sizes will be sorted by increasing pixel area (width x height).
3368          If several resolutions have the same area, they will be sorted by increasing width.
          * The aspect ratio of the largest thumbnail size will be the same as the
          aspect ratio of the largest JPEG output size in android.scaler.availableStreamConfigurations.
3371          The largest size is defined as the size that has the largest pixel area
3372          in a given size list.
3373          * Each output JPEG size in android.scaler.availableStreamConfigurations will have at least
3374          one corresponding size that has the same aspect ratio in availableThumbnailSizes,
3375          and vice versa.
3376          * All non-`(0, 0)` sizes will have non-zero widths and heights.</details>
3377          <tag id="BC" />
3378        </entry>
3379        <entry name="maxSize" type="int32" visibility="system">
3380          <description>Maximum size in bytes for the compressed
3381          JPEG buffer</description>
3382          <range>Must be large enough to fit any JPEG produced by
3383          the camera</range>
3384          <details>This is used for sizing the gralloc buffers for
3385          JPEG</details>
3386        </entry>
3387      </static>
3388      <dynamic>
3389        <clone entry="android.jpeg.gpsLocation" kind="controls">
3390        </clone>
3391        <clone entry="android.jpeg.gpsCoordinates" kind="controls">
3392        </clone>
3393        <clone entry="android.jpeg.gpsProcessingMethod"
3394        kind="controls"></clone>
3395        <clone entry="android.jpeg.gpsTimestamp" kind="controls">
3396        </clone>
3397        <clone entry="android.jpeg.orientation" kind="controls">
3398        </clone>
3399        <clone entry="android.jpeg.quality" kind="controls">
3400        </clone>
3401        <entry name="size" type="int32">
3402          <description>The size of the compressed JPEG image, in
3403          bytes</description>
3404          <range>&amp;gt;= 0</range>
3405          <details>If no JPEG output is produced for the request,
3406          this must be 0.
3407
3408          Otherwise, this describes the real size of the compressed
3409          JPEG image placed in the output stream.  More specifically,
3410          if android.jpeg.maxSize = 1000000, and a specific capture
3411          has android.jpeg.size = 500000, then the output buffer from
3412          the JPEG stream will be 1000000 bytes, of which the first
3413          500000 make up the real data.</details>
3414          <tag id="FUTURE" />
3415        </entry>
3416        <clone entry="android.jpeg.thumbnailQuality"
3417        kind="controls"></clone>
3418        <clone entry="android.jpeg.thumbnailSize" kind="controls">
3419        </clone>
3420      </dynamic>
3421    </section>
3422    <section name="lens">
3423      <controls>
3424        <entry name="aperture" type="float" visibility="public" hwlevel="full">
3425          <description>The desired lens aperture size, as a ratio of lens focal length to the
3426          effective aperture diameter.</description>
3427          <units>The f-number (f/N)</units>
3428          <range>android.lens.info.availableApertures</range>
3429          <details>Setting this value is only supported on the camera devices that have a variable
3430          aperture lens.
3431
3432          When this is supported and android.control.aeMode is OFF,
3433          this can be set along with android.sensor.exposureTime,
3434          android.sensor.sensitivity, and android.sensor.frameDuration
3435          to achieve manual exposure control.
3436
3437          The requested aperture value may take several frames to reach the
3438          requested value; the camera device will report the current (intermediate)
3439          aperture size in capture result metadata while the aperture is changing.
3440          While the aperture is still changing, android.lens.state will be set to MOVING.
3441
3442          When this is supported and android.control.aeMode is one of
3443          the ON modes, this will be overridden by the camera device
          auto-exposure algorithm; the overridden values are then provided
3445          back to the user in the corresponding result.</details>
3446          <tag id="V1" />
3447        </entry>
3448        <entry name="filterDensity" type="float" visibility="public" hwlevel="full">
3449          <description>
3450          The desired setting for the lens neutral density filter(s).
3451          </description>
3452          <units>Exposure Value (EV)</units>
3453          <range>android.lens.info.availableFilterDensities</range>
3454          <details>
3455          This control will not be supported on most camera devices.
3456
3457          Lens filters are typically used to lower the amount of light the
3458          sensor is exposed to (measured in steps of EV). As used here, an EV
          step is the standard logarithmic representation, which is
          non-negative and inversely proportional to the amount of light
3461          hitting the sensor.  For example, setting this to 0 would result
3462          in no reduction of the incoming light, and setting this to 2 would
3463          mean that the filter is set to reduce incoming light by two stops
3464          (allowing 1/4 of the prior amount of light to the sensor).
3465
3466          It may take several frames before the lens filter density changes
3467          to the requested value. While the filter density is still changing,
3468          android.lens.state will be set to MOVING.
3469          </details>
3470          <tag id="V1" />
3471        </entry>
3472        <entry name="focalLength" type="float" visibility="public" hwlevel="legacy">
3473          <description>
3474          The desired lens focal length; used for optical zoom.
3475          </description>
3476          <units>Millimeters</units>
3477          <range>android.lens.info.availableFocalLengths</range>
3478          <details>
3479          This setting controls the physical focal length of the camera
3480          device's lens. Changing the focal length changes the field of
3481          view of the camera device, and is usually used for optical zoom.
3482
3483          Like android.lens.focusDistance and android.lens.aperture, this
3484          setting won't be applied instantaneously, and it may take several
3485          frames before the lens can change to the requested focal length.
3486          While the focal length is still changing, android.lens.state will
3487          be set to MOVING.
3488
3489          Optical zoom will not be supported on most devices.
3490          </details>
3491          <tag id="V1" />
3492        </entry>
3493        <entry name="focusDistance" type="float" visibility="public" hwlevel="full">
3494          <description>Desired distance to plane of sharpest focus,
3495          measured from frontmost surface of the lens.</description>
3496          <units>See android.lens.info.focusDistanceCalibration for details</units>
3497          <range>&amp;gt;= 0</range>
3498          <details>
3499          This control can be used for setting manual focus, on devices that support
3500          the MANUAL_SENSOR capability and have a variable-focus lens (see
3501          android.lens.info.minimumFocusDistance).
3502
3503          A value of `0.0f` means infinity focus. The value set will be clamped to
3504          `[0.0f, android.lens.info.minimumFocusDistance]`.
3505
3506          Like android.lens.focalLength, this setting won't be applied
3507          instantaneously, and it may take several frames before the lens
3508          can move to the requested focus distance. While the lens is still moving,
3509          android.lens.state will be set to MOVING.
3510
3511          LEGACY devices support at most setting this to `0.0f`
3512          for infinity focus.
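
          A minimal Java sketch (assuming `characteristics` is this device's
          CameraCharacteristics and `builder` is a CaptureRequest.Builder) of focusing as
          close as the lens allows:

              // Disable autofocus so the manual focus distance is honored.
              builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF);
              Float minFocus = characteristics.get(
                      CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
              // 0.0f is infinity focus; larger diopter values focus closer to the camera.
              builder.set(CaptureRequest.LENS_FOCUS_DISTANCE,
                      minFocus == null ? 0.0f : minFocus);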
3513          </details>
3514          <tag id="BC" />
3515          <tag id="V1" />
3516        </entry>
3517        <entry name="opticalStabilizationMode" type="byte" visibility="public"
3518        enum="true" hwlevel="limited">
3519          <enum>
3520            <value>OFF
3521              <notes>Optical stabilization is unavailable.</notes>
3522            </value>
3523            <value optional="true">ON
3524              <notes>Optical stabilization is enabled.</notes>
3525            </value>
3526          </enum>
3527          <description>
3528          Sets whether the camera device uses optical image stabilization (OIS)
3529          when capturing images.
3530          </description>
3531          <range>android.lens.info.availableOpticalStabilization</range>
3532          <details>
3533          OIS is used to compensate for motion blur due to small
3534          movements of the camera during capture. Unlike digital image
3535          stabilization (android.control.videoStabilizationMode), OIS
3536          makes use of mechanical elements to stabilize the camera
3537          sensor, and thus allows for longer exposure times before
3538          camera shake becomes apparent.
3539
          Switching between different optical stabilization modes may take several
          frames to initialize; the camera device will report the current mode in
          capture result metadata. For example, when "ON" mode is requested, the
          optical stabilization mode in the first several capture results may still
3544          be "OFF", and it will become "ON" when the initialization is done.
3545
3546          If a camera device supports both OIS and digital image stabilization
3547          (android.control.videoStabilizationMode), turning both modes on may produce undesirable
3548          interaction, so it is recommended not to enable both at the same time.
3549
3550          Not all devices will support OIS; see
3551          android.lens.info.availableOpticalStabilization for
3552          available controls.
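
          A minimal Java sketch (assuming `characteristics` is this device's
          CameraCharacteristics and `builder` is a CaptureRequest.Builder) of enabling OIS
          only when the device advertises it:

              int[] oisModes = characteristics.get(
                      CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION);
              for (int mode : oisModes) {
                  if (mode == CameraMetadata.LENS_OPTICAL_STABILIZATION_MODE_ON) {
                      builder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, mode);
                  }
              }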
3553          </details>
3554          <tag id="V1" />
3555        </entry>
3556      </controls>
3557      <static>
3558        <namespace name="info">
3559          <entry name="availableApertures" type="float" visibility="public"
3560          container="array" hwlevel="full">
3561            <array>
3562              <size>n</size>
3563            </array>
3564            <description>List of aperture size values for android.lens.aperture that are
3565            supported by this camera device.</description>
3566            <units>The aperture f-number</units>
3567            <details>If the camera device doesn't support a variable lens aperture,
3568            this list will contain only one value, which is the fixed aperture size.
3569
3570            If the camera device supports a variable aperture, the aperture values
3571            in this list will be sorted in ascending order.</details>
3572            <tag id="V1" />
3573          </entry>
3574          <entry name="availableFilterDensities" type="float" visibility="public"
3575          container="array" hwlevel="full">
3576            <array>
3577              <size>n</size>
3578            </array>
3579            <description>
3580            List of neutral density filter values for
3581            android.lens.filterDensity that are supported by this camera device.
3582            </description>
3583            <units>Exposure value (EV)</units>
3584            <range>
3585            Values are &amp;gt;= 0
3586            </range>
3587            <details>
3588            If a neutral density filter is not supported by this camera device,
3589            this list will contain only 0. Otherwise, this list will include every
3590            filter density supported by the camera device, in ascending order.
3591            </details>
3592            <tag id="V1" />
3593          </entry>
3594          <entry name="availableFocalLengths" type="float" visibility="public"
3595          type_notes="The list of available focal lengths"
3596          container="array" hwlevel="legacy">
3597            <array>
3598              <size>n</size>
3599            </array>
3600            <description>
3601            List of focal lengths for android.lens.focalLength that are supported by this camera
3602            device.
3603            </description>
3604            <units>Millimeters</units>
3605            <range>
3606            Values are &amp;gt; 0
3607            </range>
3608            <details>
3609            If optical zoom is not supported, this list will only contain
3610            a single value corresponding to the fixed focal length of the
3611            device. Otherwise, this list will include every focal length supported
3612            by the camera device, in ascending order.
3613            </details>
3614            <tag id="BC" />
3615            <tag id="V1" />
3616          </entry>
3617          <entry name="availableOpticalStabilization" type="byte"
3618          visibility="public" type_notes="list of enums" container="array"
3619          typedef="enumList" hwlevel="limited">
3620            <array>
3621              <size>n</size>
3622            </array>
3623            <description>
3624            List of optical image stabilization (OIS) modes for
3625            android.lens.opticalStabilizationMode that are supported by this camera device.
3626            </description>
3627            <range>Any value listed in android.lens.opticalStabilizationMode</range>
3628            <details>
3629            If OIS is not supported by a given camera device, this list will
3630            contain only OFF.
3631            </details>
3632            <tag id="V1" />
3633          </entry>
3634          <entry name="hyperfocalDistance" type="float" visibility="public" optional="true"
3635                 hwlevel="limited">
3636            <description>Hyperfocal distance for this lens.</description>
3637            <units>See android.lens.info.focusDistanceCalibration for details</units>
3638            <range>If lens is fixed focus, &amp;gt;= 0. If lens has focuser unit, the value is
3639            within `(0.0f, android.lens.info.minimumFocusDistance]`</range>
3640            <details>
3641            If the lens is not fixed focus, the camera device will report this
3642            field when android.lens.info.focusDistanceCalibration is APPROXIMATE or CALIBRATED.
3643            </details>
3644          </entry>
3645          <entry name="minimumFocusDistance" type="float" visibility="public" optional="true"
3646                 hwlevel="limited">
3647            <description>Shortest distance from frontmost surface
3648            of the lens that can be brought into sharp focus.</description>
3649            <units>See android.lens.info.focusDistanceCalibration for details</units>
3650            <range>&amp;gt;= 0</range>
3651            <details>If the lens is fixed-focus, this will be
3652            0.</details>
3653            <hal_details>Mandatory for FULL devices; LIMITED devices
            must always set this value to 0 for fixed-focus lenses, and may omit
3655            the minimum focus distance otherwise.
3656
3657            This field is also mandatory for all devices advertising
3658            the MANUAL_SENSOR capability.</hal_details>
3659            <tag id="V1" />
3660          </entry>
3661          <entry name="shadingMapSize" type="int32" visibility="ndk_public"
3662                 type_notes="width and height (N, M) of lens shading map provided by the camera device."
3663                 container="array" typedef="size" hwlevel="full">
3664            <array>
3665              <size>2</size>
3666            </array>
3667            <description>Dimensions of lens shading map.</description>
3668            <range>Both values &amp;gt;= 1</range>
3669            <details>
3670            The map should be on the order of 30-40 rows and columns, and
3671            must be smaller than 64x64.
3672            </details>
3673            <tag id="V1" />
3674          </entry>
3675          <entry name="focusDistanceCalibration" type="byte" visibility="public"
3676                 enum="true" hwlevel="limited">
3677            <enum>
3678              <value>UNCALIBRATED
3679                <notes>
3680                The lens focus distance is not accurate, and the units used for
3681                android.lens.focusDistance do not correspond to any physical units.
3682
3683                Setting the lens to the same focus distance on separate occasions may
3684                result in a different real focus distance, depending on factors such
3685                as the orientation of the device, the age of the focusing mechanism,
3686                and the device temperature. The focus distance value will still be
3687                in the range of `[0, android.lens.info.minimumFocusDistance]`, where 0
3688                represents the farthest focus.
3689                </notes>
3690              </value>
3691              <value>APPROXIMATE
3692                <notes>
3693                The lens focus distance is measured in diopters.
3694
3695                However, setting the lens to the same focus distance
3696                on separate occasions may result in a different real
3697                focus distance, depending on factors such as the
3698                orientation of the device, the age of the focusing
3699                mechanism, and the device temperature.
3700                </notes>
3701              </value>
3702              <value>CALIBRATED
3703                <notes>
3704                The lens focus distance is measured in diopters, and
3705                is calibrated.
3706
3707                The lens mechanism is calibrated so that setting the
3708                same focus distance is repeatable on multiple
3709                occasions with good accuracy, and the focus distance
3710                corresponds to the real physical distance to the plane
3711                of best focus.
3712                </notes>
3713              </value>
3714            </enum>
3715            <description>The lens focus distance calibration quality.</description>
3716            <details>
3717            The lens focus distance calibration quality determines the reliability of
3718            focus related metadata entries, i.e. android.lens.focusDistance,
3719            android.lens.focusRange, android.lens.info.hyperfocalDistance, and
3720            android.lens.info.minimumFocusDistance.
3721
3722            APPROXIMATE and CALIBRATED devices report the focus metadata in
3723            units of diopters (1/meter), so `0.0f` represents focusing at infinity,
3724            and increasing positive numbers represent focusing closer and closer
3725            to the camera device. The focus distance control also uses diopters
3726            on these devices.
3727
3728            UNCALIBRATED devices do not use units that are directly comparable
3729            to any real physical measurement, but `0.0f` still represents farthest
3730            focus, and android.lens.info.minimumFocusDistance represents the
3731            nearest focus the device can achieve.
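
            As an illustration, a minimal Java sketch (assuming `result` is a CaptureResult
            from an APPROXIMATE or CALIBRATED device) of converting the reported focus
            distance from diopters to meters:

                Float diopters = result.get(CaptureResult.LENS_FOCUS_DISTANCE);
                // 0.0f diopters is infinity focus; otherwise the distance is 1 / diopters.
                float meters = (diopters == null || diopters == 0f)
                        ? Float.POSITIVE_INFINITY : 1.0f / diopters;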
3732            </details>
3733            <hal_details>
            For devices that advertise APPROXIMATE quality or higher, 0 diopters (infinity
3735            focus) must work. When autofocus is disabled (android.control.afMode == OFF)
3736            and the lens focus distance is set to 0 diopters
3737            (android.lens.focusDistance == 0), the lens will move to focus at infinity
3738            and is stably focused at infinity even if the device tilts. It may take the
3739            lens some time to move; during the move the lens state should be MOVING and
3740            the output diopter value should be changing toward 0.
3741            </hal_details>
3742          <tag id="V1" />
3743        </entry>
3744        </namespace>
3745        <entry name="facing" type="byte" visibility="public" enum="true" hwlevel="legacy">
3746          <enum>
3747            <value>FRONT
3748            <notes>
3749              The camera device faces the same direction as the device's screen.
3750            </notes></value>
3751            <value>BACK
3752            <notes>
3753              The camera device faces the opposite direction as the device's screen.
3754            </notes></value>
3755            <value>EXTERNAL
3756            <notes>
3757              The camera device is an external camera, and has no fixed facing relative to the
3758              device's screen.
3759            </notes></value>
3760          </enum>
3761          <description>Direction the camera faces relative to
3762          device screen.</description>
3763        </entry>
3764        <entry name="poseRotation" type="float" visibility="public"
3765               container="array">
3766          <array>
3767            <size>4</size>
3768          </array>
3769          <description>
3770            The orientation of the camera relative to the sensor
3771            coordinate system.
3772          </description>
3773          <units>
3774            Quaternion coefficients
3775          </units>
3776          <details>
3777            The four coefficients that describe the quaternion
3778            rotation from the Android sensor coordinate system to a
3779            camera-aligned coordinate system where the X-axis is
3780            aligned with the long side of the image sensor, the Y-axis
3781            is aligned with the short side of the image sensor, and
3782            the Z-axis is aligned with the optical axis of the sensor.
3783
3784            To convert from the quaternion coefficients `(x,y,z,w)`
3785            to the axis of rotation `(a_x, a_y, a_z)` and rotation
3786            amount `theta`, the following formulas can be used:
3787
                theta = 2 * acos(w)
3789                a_x = x / sin(theta/2)
3790                a_y = y / sin(theta/2)
3791                a_z = z / sin(theta/2)
3792
3793            To create a 3x3 rotation matrix that applies the rotation
3794            defined by this quaternion, the following matrix can be
3795            used:
3796
3797                R = [ 1 - 2y^2 - 2z^2,       2xy - 2zw,       2xz + 2yw,
3798                           2xy + 2zw, 1 - 2x^2 - 2z^2,       2yz - 2xw,
3799                           2xz - 2yw,       2yz + 2xw, 1 - 2x^2 - 2y^2 ]
3800
3801             This matrix can then be used to apply the rotation to a
3802             column vector point with
3803
3804               `p' = Rp`
3805
3806             where `p` is in the device sensor coordinate system, and
3807             `p'` is in the camera-oriented coordinate system.
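
            A minimal Java sketch (assuming `q` holds the four quaternion coefficients in
            `(x, y, z, w)` order, as reported by this key) of building the row-major 3x3
            rotation matrix above:

                float x = q[0], y = q[1], z = q[2], w = q[3];
                float[] R = {
                        1 - 2*y*y - 2*z*z, 2*x*y - 2*z*w,     2*x*z + 2*y*w,
                        2*x*y + 2*z*w,     1 - 2*x*x - 2*z*z, 2*y*z - 2*x*w,
                        2*x*z - 2*y*w,     2*y*z + 2*x*w,     1 - 2*x*x - 2*y*y
                };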
3808          </details>
3809          <tag id="DEPTH" />
3810        </entry>
3811        <entry name="poseTranslation" type="float" visibility="public"
3812               container="array">
3813          <array>
3814            <size>3</size>
3815          </array>
3816          <description>Position of the camera optical center.</description>
3817          <units>Meters</units>
3818          <details>
3819            The position of the camera device's lens optical center,
3820            as a three-dimensional vector `(x,y,z)`, relative to the
3821            optical center of the largest camera device facing in the
3822            same direction as this camera, in the
3823            [Android sensor coordinate axes](https://developer.android.com/reference/android/hardware/SensorEvent.html).
3824            Note that only the axis definitions are shared with
3825            the sensor coordinate system, but not the origin.
3826
3827            If this device is the largest or only camera device with a
3828            given facing, then this position will be `(0, 0, 0)`; a
3829            camera device with a lens optical center located 3 cm from
3830            the main sensor along the +X axis (to the right from the
3831            user's perspective) will report `(0.03, 0, 0)`.
3832
            To transform pixel coordinates between two cameras
3834            facing the same direction, first the source camera
3835            android.lens.radialDistortion must be corrected for.  Then
3836            the source camera android.lens.intrinsicCalibration needs
3837            to be applied, followed by the android.lens.poseRotation
3838            of the source camera, the translation of the source camera
3839            relative to the destination camera, the
3840            android.lens.poseRotation of the destination camera, and
3841            finally the inverse of android.lens.intrinsicCalibration
3842            of the destination camera. This obtains a
3843            radial-distortion-free coordinate in the destination
3844            camera pixel coordinates.
3845
3846            To compare this against a real image from the destination
3847            camera, the destination camera image then needs to be
3848            corrected for radial distortion before comparison or
3849            sampling.
3850          </details>
3851          <tag id="DEPTH" />
3852        </entry>
3853      </static>
3854      <dynamic>
3855        <clone entry="android.lens.aperture" kind="controls">
3856          <tag id="V1" />
3857        </clone>
3858        <clone entry="android.lens.filterDensity" kind="controls">
3859          <tag id="V1" />
3860        </clone>
3861        <clone entry="android.lens.focalLength" kind="controls">
3862          <tag id="BC" />
3863        </clone>
3864        <clone entry="android.lens.focusDistance" kind="controls">
3865          <details>Should be zero for fixed-focus cameras</details>
3866          <tag id="BC" />
3867        </clone>
3868        <entry name="focusRange" type="float" visibility="public"
3869        type_notes="Range of scene distances that are in focus"
3870        container="array" typedef="pairFloatFloat" hwlevel="limited">
3871          <array>
3872            <size>2</size>
3873          </array>
3874          <description>The range of scene distances that are in
3875          sharp focus (depth of field).</description>
3876          <units>A pair of focus distances in diopters: (near,
3877          far); see android.lens.info.focusDistanceCalibration for details.</units>
3878          <range>&amp;gt;=0</range>
          <details>If variable focus is not supported, the camera device can still report a
          fixed depth of field range.</details>
3881          <tag id="BC" />
3882        </entry>
3883        <clone entry="android.lens.opticalStabilizationMode"
3884        kind="controls">
3885          <tag id="V1" />
3886        </clone>
3887        <entry name="state" type="byte" visibility="public" enum="true" hwlevel="limited">
3888          <enum>
3889            <value>STATIONARY
3890              <notes>
3891              The lens parameters (android.lens.focalLength, android.lens.focusDistance,
3892              android.lens.filterDensity and android.lens.aperture) are not changing.
3893              </notes>
3894            </value>
3895            <value>MOVING
3896              <notes>
3897              One or several of the lens parameters
3898              (android.lens.focalLength, android.lens.focusDistance,
3899              android.lens.filterDensity or android.lens.aperture) is
3900              currently changing.
3901              </notes>
3902            </value>
3903          </enum>
3904          <description>Current lens status.</description>
3905          <details>
3906          For lens parameters android.lens.focalLength, android.lens.focusDistance,
3907          android.lens.filterDensity and android.lens.aperture, when changes are requested,
3908          they may take several frames to reach the requested values. This state indicates
3909          the current status of the lens parameters.
3910
3911          When the state is STATIONARY, the lens parameters are not changing. This could be
3912          either because the parameters are all fixed, or because the lens has had enough
3913          time to reach the most recently-requested values.
          If all of these lens parameters are not changeable for a camera device, as listed below:
3915
3916          * Fixed focus (`android.lens.info.minimumFocusDistance == 0`), which means
3917          android.lens.focusDistance parameter will always be 0.
3918          * Fixed focal length (android.lens.info.availableFocalLengths contains single value),
3919          which means the optical zoom is not supported.
3920          * No ND filter (android.lens.info.availableFilterDensities contains only 0).
3921          * Fixed aperture (android.lens.info.availableApertures contains single value).
3922
3923          Then this state will always be STATIONARY.
3924
3925          When the state is MOVING, it indicates that at least one of the lens parameters
3926          is changing.
3927          </details>
3928          <tag id="V1" />
3929        </entry>
3930        <clone entry="android.lens.poseRotation" kind="static">
3931        </clone>
3932        <clone entry="android.lens.poseTranslation" kind="static">
3933        </clone>
3934      </dynamic>
3935      <static>
3936        <entry name="intrinsicCalibration" type="float" visibility="public"
3937               container="array">
3938          <array>
3939            <size>5</size>
3940          </array>
3941          <description>
3942            The parameters for this camera device's intrinsic
3943            calibration.
3944          </description>
3945          <units>
3946            Pixels in the
3947            android.sensor.info.preCorrectionActiveArraySize
3948            coordinate system.
3949          </units>
3950          <details>
3951            The five calibration parameters that describe the
3952            transform from camera-centric 3D coordinates to sensor
3953            pixel coordinates:
3954
3955                [f_x, f_y, c_x, c_y, s]
3956
3957            Where `f_x` and `f_y` are the horizontal and vertical
3958            focal lengths, `[c_x, c_y]` is the position of the optical
3959            axis, and `s` is a skew parameter for the sensor plane not
3960            being aligned with the lens plane.
3961
3962            These are typically used within a transformation matrix K:
3963
3964                K = [ f_x,   s, c_x,
3965                       0, f_y, c_y,
3966                       0    0,   1 ]
3967
3968            which can then be combined with the camera pose rotation
3969            `R` and translation `t` (android.lens.poseRotation and
            android.lens.poseTranslation, respectively) to calculate the
3971            complete transform from world coordinates to pixel
3972            coordinates:
3973
3974                P = [ K 0   * [ R t
3975                     0 1 ]     0 1 ]
3976
3977            and with `p_w` being a point in the world coordinate system
3978            and `p_s` being a point in the camera active pixel array
3979            coordinate system, and with the mapping including the
3980            homogeneous division by z:
3981
                p_h = (x_h, y_h, z_h) = P p_w
3983                p_s = p_h / z_h
3984
            so `[x_s, y_s]` are the pixel coordinates of the world
3986            point, `z_s = 1`, and `w_s` is a measurement of disparity
3987            (depth) in pixel coordinates.
3988
3989            Note that the coordinate system for this transform is the
3990            android.sensor.info.preCorrectionActiveArraySize system,
3991            where `(0,0)` is the top-left of the
3992            preCorrectionActiveArraySize rectangle. Once the pose and
3993            intrinsic calibration transforms have been applied to a
3994            world point, then the android.lens.radialDistortion
3995            transform needs to be applied, and the result adjusted to
3996            be in the android.sensor.info.activeArraySize coordinate
3997            system (where `(0, 0)` is the top-left of the
3998            activeArraySize rectangle), to determine the final pixel
3999            coordinate of the world point for processed (non-RAW)
4000            output buffers.
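
            As an illustration, a minimal Java sketch (assuming `calib` is the five-element
            array for this key and `X`, `Y`, `Z` are the coordinates of a point already
            expressed in the camera-centric frame, i.e. with the pose transform applied) of
            projecting that point to pre-correction pixel coordinates:

                float fx = calib[0], fy = calib[1], cx = calib[2], cy = calib[3], s = calib[4];
                // Apply K, then divide by depth (the homogeneous z coordinate).
                float xPixel = (fx * X + s * Y + cx * Z) / Z;
                float yPixel = (fy * Y + cy * Z) / Z;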
4001          </details>
4002          <tag id="DEPTH" />
4003        </entry>
4004        <entry name="radialDistortion" type="float" visibility="public"
4005               container="array">
4006          <array>
4007            <size>6</size>
4008          </array>
4009          <description>
4010            The correction coefficients to correct for this camera device's
4011            radial and tangential lens distortion.
4012          </description>
4013          <units>
4014            Unitless coefficients.
4015          </units>
4016          <details>
4017            Four radial distortion coefficients `[kappa_0, kappa_1, kappa_2,
4018            kappa_3]` and two tangential distortion coefficients
4019            `[kappa_4, kappa_5]` that can be used to correct the
4020            lens's geometric distortion with the mapping equations:
4021
4022                 x_c = x_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
4023                       kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 )
4024                 y_c = y_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
4025                       kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 )
4026
4027            Here, `[x_c, y_c]` are the coordinates to sample in the
4028            input image that correspond to the pixel values in the
4029            corrected image at the coordinate `[x_i, y_i]`:
4030
4031                 correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage)
4032
4033            The pixel coordinates are defined in a normalized
4034            coordinate system related to the
4035            android.lens.intrinsicCalibration calibration fields.
4036            Both `[x_i, y_i]` and `[x_c, y_c]` have `(0,0)` at the
4037            lens optical center `[c_x, c_y]`. The maximum magnitudes
4038            of both x and y coordinates are normalized to be 1 at the
4039            edge further from the optical center, so the range
4040            for both dimensions is `-1 &lt;= x &lt;= 1`.
4041
4042            Finally, `r` represents the radial distance from the
4043            optical center, `r^2 = x_i^2 + y_i^2`, and its magnitude
4044            is therefore no larger than `|r| &lt;= sqrt(2)`.
4045
4046            The distortion model used is the Brown-Conrady model.
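
            As an illustration, a minimal Java sketch (assuming `d` is the six-element array
            for this key and `xi`, `yi` are normalized corrected-image coordinates relative
            to the optical center) of computing where to sample the input image:

                float r2 = xi * xi + yi * yi;
                float radial = d[0] + d[1] * r2 + d[2] * r2 * r2 + d[3] * r2 * r2 * r2;
                float xc = xi * radial + d[4] * (2 * xi * yi) + d[5] * (r2 + 2 * xi * xi);
                float yc = yi * radial + d[5] * (2 * xi * yi) + d[4] * (r2 + 2 * yi * yi);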
4047          </details>
4048          <tag id="DEPTH" />
4049        </entry>
4050      </static>
4051      <dynamic>
4052        <clone entry="android.lens.intrinsicCalibration" kind="static">
4053        </clone>
4054        <clone entry="android.lens.radialDistortion" kind="static">
4055        </clone>
4056      </dynamic>
4057    </section>
4058    <section name="noiseReduction">
4059      <controls>
4060        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
4061          <enum>
4062            <value>OFF
4063            <notes>No noise reduction is applied.</notes></value>
4064            <value>FAST
4065            <notes>Noise reduction is applied without reducing frame rate relative to sensor
4066            output. It may be the same as OFF if noise reduction will reduce frame rate
4067            relative to sensor.</notes></value>
4068            <value>HIGH_QUALITY
4069            <notes>High-quality noise reduction is applied, at the cost of possibly reduced frame
4070            rate relative to sensor output.</notes></value>
4071            <value optional="true">MINIMAL
4072            <notes>MINIMAL noise reduction is applied without reducing frame rate relative to
4073            sensor output. </notes></value>
4074            <value optional="true">ZERO_SHUTTER_LAG
4075
4076            <notes>Noise reduction is applied at different levels for different output streams,
4077            based on resolution. Streams at maximum recording resolution (see {@link
4078            ACameraDevice_createCaptureSession}) or below have noise
4079            reduction applied, while higher-resolution streams have MINIMAL (if supported) or no
            noise reduction applied (if MINIMAL is not supported). The degree of noise reduction
4081            for low-resolution streams is tuned so that frame rate is not impacted, and the quality
4082            is equal to or better than FAST (since it is only applied to lower-resolution outputs,
4083            quality may improve from FAST).
4084
4085            This mode is intended to be used by applications operating in a zero-shutter-lag mode
4086            with YUV or PRIVATE reprocessing, where the application continuously captures
4087            high-resolution intermediate buffers into a circular buffer, from which a final image is
4088            produced via reprocessing when a user takes a picture.  For such a use case, the
4089            high-resolution buffers must not have noise reduction applied to maximize efficiency of
4090            preview and to avoid over-applying noise filtering when reprocessing, while
4091            low-resolution buffers (used for recording or preview, generally) need noise reduction
4092            applied for reasonable preview quality.
4093
4094            This mode is guaranteed to be supported by devices that support either the
4095            YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
4096            (android.request.availableCapabilities lists either of those capabilities) and it will
4097            be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
4098            </notes></value>
4099          </enum>
4100          <description>Mode of operation for the noise reduction algorithm.</description>
4101          <range>android.noiseReduction.availableNoiseReductionModes</range>
4102          <details>The noise reduction algorithm attempts to improve image quality by removing
4103          excessive noise added by the capture process, especially in dark conditions.
4104
4105          OFF means no noise reduction will be applied by the camera device, for both raw and
4106          YUV domain.
4107
          MINIMAL means that only sensor raw domain basic noise reduction is enabled, to remove
          demosaicing or other processing artifacts. For YUV_REPROCESSING, MINIMAL is the same as OFF.
          This mode is optional and may not be supported by all devices. The application should check
          android.noiseReduction.availableNoiseReductionModes before using it.
4112
4113          FAST/HIGH_QUALITY both mean camera device determined noise filtering
4114          will be applied. HIGH_QUALITY mode indicates that the camera device
4115          will use the highest-quality noise filtering algorithms,
4116          even if it slows down capture rate. FAST means the camera device will not
4117          slow down capture rate when applying noise filtering. FAST may be the same as MINIMAL if
4118          MINIMAL is listed, or the same as OFF if any noise filtering will slow down capture rate.
4119          Every output stream will have a similar amount of enhancement applied.
4120
4121          ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
4122          buffer of high-resolution images during preview and reprocess image(s) from that buffer
4123          into a final capture when triggered by the user. In this mode, the camera device applies
4124          noise reduction to low-resolution streams (below maximum recording resolution) to maximize
4125          preview quality, but does not apply noise reduction to high-resolution streams, since
4126          those will be reprocessed later if necessary.
4127
4128          For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera device
4129          will apply FAST/HIGH_QUALITY YUV domain noise reduction, respectively. The camera device
4130          may adjust the noise reduction parameters for best image quality based on the
4131          android.reprocess.effectiveExposureFactor if it is set.
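
          A minimal Java sketch (assuming `characteristics` is this device's
          CameraCharacteristics and `builder` is a CaptureRequest.Builder for a
          zero-shutter-lag use case) of preferring ZERO_SHUTTER_LAG and falling back to FAST:

              int[] nrModes = characteristics.get(
                      CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES);
              int chosen = CameraMetadata.NOISE_REDUCTION_MODE_FAST;
              for (int mode : nrModes) {
                  if (mode == CameraMetadata.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG) {
                      chosen = mode;
                  }
              }
              builder.set(CaptureRequest.NOISE_REDUCTION_MODE, chosen);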
4132          </details>
4133          <hal_details>
          For YUV_REPROCESSING, the HAL can use android.reprocess.effectiveExposureFactor to
4135          adjust the internal noise reduction parameters appropriately to get the best quality
4136          images.
4137          </hal_details>
4138          <tag id="V1" />
4139          <tag id="REPROC" />
4140        </entry>
4141        <entry name="strength" type="byte">
4142          <description>Control the amount of noise reduction
4143          applied to the images</description>
4144          <units>1-10; 10 is max noise reduction</units>
4145          <range>1 - 10</range>
4146          <tag id="FUTURE" />
4147        </entry>
4148      </controls>
4149      <static>
4150        <entry name="availableNoiseReductionModes" type="byte" visibility="public"
4151        type_notes="list of enums" container="array" typedef="enumList" hwlevel="limited">
4152          <array>
4153            <size>n</size>
4154          </array>
4155          <description>
4156          List of noise reduction modes for android.noiseReduction.mode that are supported
4157          by this camera device.
4158          </description>
4159          <range>Any value listed in android.noiseReduction.mode</range>
4160          <details>
4161          Full-capability camera devices will always support OFF and FAST.
4162
4163          Camera devices that support YUV_REPROCESSING or PRIVATE_REPROCESSING will support
4164          ZERO_SHUTTER_LAG.
4165
4166          Legacy-capability camera devices will only support FAST mode.
4167          </details>
4168          <hal_details>
4169          HAL must support both FAST and HIGH_QUALITY if noise reduction control is available
4170          on the camera device, but the underlying implementation can be the same for both modes.
4171          That is, if the highest quality implementation on the camera device does not slow down
4172          capture rate, then FAST and HIGH_QUALITY will generate the same output.
4173          </hal_details>
4174          <tag id="V1" />
4175          <tag id="REPROC" />
4176        </entry>
4177      </static>
4178      <dynamic>
4179        <clone entry="android.noiseReduction.mode" kind="controls">
4180          <tag id="V1" />
4181          <tag id="REPROC" />
4182        </clone>
4183      </dynamic>
4184    </section>
4185    <section name="quirks">
4186      <static>
4187        <entry name="meteringCropRegion" type="byte" visibility="system" deprecated="true" optional="true">
          <description>If set to 1, the camera service does not
          scale 'normalized' coordinates with respect to the crop
          region. This applies to metering input (a{e,f,wb}Region)
          and output (face rectangles).</description>
4192          <details>Normalized coordinates refer to those in the
4193          (-1000,1000) range mentioned in the
4194          android.hardware.Camera API.
4195
4196          HAL implementations should instead always use and emit
4197          sensor array-relative coordinates for all region data. Does
4198          not need to be listed in static metadata. Support will be
4199          removed in future versions of camera service.</details>
4200        </entry>
4201        <entry name="triggerAfWithAuto" type="byte" visibility="system" deprecated="true" optional="true">
          <description>If set to 1, then the camera service always
          switches to FOCUS_MODE_AUTO before issuing an AF
          trigger.</description>
4205          <details>HAL implementations should implement AF trigger
4206          modes for AUTO, MACRO, CONTINUOUS_FOCUS, and
4207          CONTINUOUS_PICTURE modes instead of using this flag. Does
4208          not need to be listed in static metadata. Support will be
          removed in future versions of camera service.</details>
4210        </entry>
4211        <entry name="useZslFormat" type="byte" visibility="system" deprecated="true" optional="true">
4212          <description>If set to 1, the camera service uses
4213          CAMERA2_PIXEL_FORMAT_ZSL instead of
4214          HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED for the zero
4215          shutter lag stream</description>
4216          <details>HAL implementations should use gralloc usage flags
4217          to determine that a stream will be used for
4218          zero-shutter-lag, instead of relying on an explicit
4219          format setting. Does not need to be listed in static
4220          metadata. Support will be removed in future versions of
4221          camera service.</details>
4222        </entry>
4223        <entry name="usePartialResult" type="byte" visibility="hidden" deprecated="true" optional="true">
4224          <description>
4225          If set to 1, the HAL will always split result
4226          metadata for a single capture into multiple buffers,
4227          returned using multiple process_capture_result calls.
4228          </description>
4229          <details>
4230          Does not need to be listed in static
4231          metadata. Support for partial results will be reworked in
4232          future versions of camera service. This quirk will stop
4233          working at that point; DO NOT USE without careful
4234          consideration of future support.
4235          </details>
4236          <hal_details>
4237          Refer to `camera3_capture_result::partial_result`
4238          for information on how to implement partial results.
4239          </hal_details>
4240        </entry>
4241      </static>
4242      <dynamic>
4243        <entry name="partialResult" type="byte" visibility="hidden" deprecated="true" optional="true" enum="true" typedef="boolean">
4244          <enum>
4245            <value>FINAL
4246            <notes>The last or only metadata result buffer
4247            for this capture.</notes>
4248            </value>
4249            <value>PARTIAL
4250            <notes>A partial buffer of result metadata for this
4251            capture. More result buffers for this capture will be sent
4252            by the camera device, the last of which will be marked
4253            FINAL.</notes>
4254            </value>
4255          </enum>
4256          <description>
4257          Whether a result given to the framework is the
4258          final one for the capture, or only a partial that contains a
4259          subset of the full set of dynamic metadata
4260          values.</description>
4261          <range>Optional. Default value is FINAL.</range>
4262          <details>
4263          The entries in the result metadata buffers for a
4264          single capture may not overlap, except for this entry. The
4265          FINAL buffers must retain FIFO ordering relative to the
4266          requests that generate them, so the FINAL buffer for frame 3 must
4267          always be sent to the framework after the FINAL buffer for frame 2, and
4268          before the FINAL buffer for frame 4. PARTIAL buffers may be returned
4269          in any order relative to other frames, but all PARTIAL buffers for a given
4270          capture must arrive before the FINAL buffer for that capture. This entry may
4271          only be used by the camera device if quirks.usePartialResult is set to 1.
4272          </details>
4273          <hal_details>
4274          Refer to `camera3_capture_result::partial_result`
4275          for information on how to implement partial results.
4276          </hal_details>
4277        </entry>
4278      </dynamic>
4279    </section>
4280    <section name="request">
4281      <controls>
4282        <entry name="frameCount" type="int32" visibility="system" deprecated="true">
          <description>A frame counter set by the framework. Must
          be maintained unchanged in the output frame. This value monotonically
4285          increases with every new result (that is, each new result has a unique
4286          frameCount value).
4287          </description>
4288          <units>incrementing integer</units>
4289          <range>Any int.</range>
4290        </entry>
4291        <entry name="id" type="int32" visibility="hidden">
          <description>An application-specified ID for the current
          request. Must be maintained unchanged in the output
          frame.</description>
4295          <units>arbitrary integer assigned by application</units>
4296          <range>Any int</range>
4297          <tag id="V1" />
4298        </entry>
4299        <entry name="inputStreams" type="int32" visibility="system" deprecated="true"
4300               container="array">
4301          <array>
4302            <size>n</size>
4303          </array>
4304          <description>List which camera reprocess stream is used
4305          for the source of reprocessing data.</description>
4306          <units>List of camera reprocess stream IDs</units>
4307          <range>
          Typically, only one entry is allowed; it must be a valid reprocess stream ID.
4309          </range>
4310          <details>Only meaningful when android.request.type ==
4311          REPROCESS. Ignored otherwise</details>
4312          <tag id="HAL2" />
4313        </entry>
4314        <entry name="metadataMode" type="byte" visibility="system"
4315               enum="true">
4316          <enum>
4317            <value>NONE
4318            <notes>No metadata should be produced on output, except
4319            for application-bound buffer data. If no
4320            application-bound streams exist, no frame should be
4321            placed in the output frame queue. If such streams
4322            exist, a frame should be placed on the output queue
4323            with null metadata but with the necessary output buffer
4324            information. Timestamp information should still be
4325            included with any output stream buffers</notes></value>
4326            <value>FULL
4327            <notes>All metadata should be produced. Statistics will
4328            only be produced if they are separately
4329            enabled</notes></value>
4330          </enum>
4331          <description>How much metadata to produce on
4332          output</description>
4333          <tag id="FUTURE" />
4334        </entry>
4335        <entry name="outputStreams" type="int32" visibility="system" deprecated="true"
4336               container="array">
4337          <array>
4338            <size>n</size>
4339          </array>
4340          <description>Lists which camera output streams image data
4341          from this capture must be sent to</description>
4342          <units>List of camera stream IDs</units>
4343          <range>List must only include streams that have been
4344          created</range>
4345          <details>If no output streams are listed, then the image
4346          data should simply be discarded. The image data must
4347          still be captured for metadata and statistics production,
4348          and the lens and flash must operate as requested.</details>
4349          <tag id="HAL2" />
4350        </entry>
4351        <entry name="type" type="byte" visibility="system" deprecated="true" enum="true">
4352          <enum>
4353            <value>CAPTURE
4354            <notes>Capture a new image from the imaging hardware,
4355            and process it according to the
4356            settings</notes></value>
4357            <value>REPROCESS
4358            <notes>Process previously captured data; the
4359            android.request.inputStreams parameter determines the
4360            source reprocessing stream. TODO: Mark dynamic metadata
4361            needed for reprocessing with [RP]</notes></value>
4362          </enum>
4363          <description>The type of the request; either CAPTURE or
4364          REPROCESS. For HAL3, this tag is redundant.
4365          </description>
4366          <tag id="HAL2" />
4367        </entry>
4368      </controls>
4369      <static>
4370        <entry name="maxNumOutputStreams" type="int32" visibility="ndk_public"
4371               container="array" hwlevel="legacy">
4372          <array>
4373            <size>3</size>
4374          </array>
4375          <description>The maximum numbers of different types of output streams
4376          that can be configured and used simultaneously by a camera device.
4377          </description>
4378          <range>
4379          For processed (and stalling) format streams, &amp;gt;= 1.
4380
4381          For Raw format (either stalling or non-stalling) streams, &amp;gt;= 0.
4382
4383          For processed (but not stalling) format streams, &amp;gt;= 3
4384          for FULL mode devices (`android.info.supportedHardwareLevel == FULL`);
4385          &amp;gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`).
4386          </range>
4387          <details>
4388          This is a 3 element tuple that contains the max number of output simultaneous
4389          streams for raw sensor, processed (but not stalling), and processed (and stalling)
4390          formats respectively. For example, assuming that JPEG is typically a processed and
4391          stalling stream, if max raw sensor format output stream number is 1, max YUV streams
4392          number is 3, and max JPEG stream number is 2, then this tuple should be `(1, 3, 2)`.
4393
4394          This lists the upper bound of the number of output streams supported by
4395          the camera device. Using more streams simultaneously may require more hardware and
4396          CPU resources that will consume more power. The image format for an output stream can
4397          be any supported format provided by android.scaler.availableStreamConfigurations.
          The formats defined in android.scaler.availableStreamConfigurations can be categorized
          into the 3 stream types below:
4400
4401          * Processed (but stalling): any non-RAW format with a stallDurations &amp;gt; 0.
4402            Typically {@link AIMAGE_FORMAT_JPEG} format.
4403          * Raw formats: {@link AIMAGE_FORMAT_RAW16}, {@link AIMAGE_FORMAT_RAW10}, or
4404            {@link AIMAGE_FORMAT_RAW12}.
4405          * Processed (but not-stalling): any non-RAW format without a stall duration.
4406            Typically {@link AIMAGE_FORMAT_YUV_420_888}.
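
          On the Java side, the three tuple elements are also exposed individually through the
          synthetic keys shown in this sketch (`characteristics` is assumed to be the device's
          CameraCharacteristics):

          ```java
          // Sketch: read the per-type output stream limits.
          int maxRaw = characteristics.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW);
          int maxProc = characteristics.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC);
          int maxProcStalling = characteristics.get(
              CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING);
          // For the example tuple (1, 3, 2): maxRaw == 1, maxProc == 3, maxProcStalling == 2.
          ```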
4407          </details>
4408          <tag id="BC" />
4409        </entry>
4410        <entry name="maxNumOutputRaw" type="int32" visibility="java_public" synthetic="true"
4411               hwlevel="legacy">
4412          <description>The maximum numbers of different types of output streams
4413          that can be configured and used simultaneously by a camera device
4414          for any `RAW` formats.
4415          </description>
4416          <range>
4417          &amp;gt;= 0
4418          </range>
4419          <details>
4420          This value contains the max number of output simultaneous
4421          streams from the raw sensor.
4422
4423          This lists the upper bound of the number of output streams supported by
4424          the camera device. Using more streams simultaneously may require more hardware and
          CPU resources that will consume more power. The image format for this kind of output stream can
          be any supported `RAW` format provided by android.scaler.streamConfigurationMap.
4427
4428          In particular, a `RAW` format is typically one of:
4429
4430          * {@link AIMAGE_FORMAT_RAW16}
4431          * {@link AIMAGE_FORMAT_RAW10}
4432          * {@link AIMAGE_FORMAT_RAW12}
4433
4434          LEGACY mode devices (android.info.supportedHardwareLevel `==` LEGACY)
4435          never support raw streams.
4436          </details>
4437        </entry>
4438        <entry name="maxNumOutputProc" type="int32" visibility="java_public" synthetic="true"
4439               hwlevel="legacy">
4440          <description>The maximum numbers of different types of output streams
4441          that can be configured and used simultaneously by a camera device
4442          for any processed (but not-stalling) formats.
4443          </description>
4444          <range>
4445          &amp;gt;= 3
4446          for FULL mode devices (`android.info.supportedHardwareLevel == FULL`);
4447          &amp;gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`).
4448          </range>
4449          <details>
4450          This value contains the max number of output simultaneous
4451          streams for any processed (but not-stalling) formats.
4452
4453          This lists the upper bound of the number of output streams supported by
4454          the camera device. Using more streams simultaneously may require more hardware and
          CPU resources that will consume more power. The image format for this kind of output stream can
          be any supported non-`RAW` format provided by android.scaler.streamConfigurationMap.
4457
4458          Processed (but not-stalling) is defined as any non-RAW format without a stall duration.
4459          Typically:
4460
4461          * {@link AIMAGE_FORMAT_YUV_420_888}
4462          * Implementation-defined formats, i.e. {@link
4463            android.hardware.camera2.params.StreamConfigurationMap#isOutputSupportedFor(Class)}
4464
4465          For full guarantees, query {@link
4466          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a
4467          processed format -- it will return 0 for a non-stalling stream.
4468
4469          LEGACY devices will support at least 2 processing/non-stalling streams.
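
          A hedged Java sketch of that stall-duration check (`configMap` is assumed to be the
          device's StreamConfigurationMap and `size` a supported output size):

          ```java
          // Sketch: classify output formats by their stall duration.
          long yuvStall = configMap.getOutputStallDuration(ImageFormat.YUV_420_888, size);
          long jpegStall = configMap.getOutputStallDuration(ImageFormat.JPEG, size);
          boolean yuvIsStalling = yuvStall > 0;   // expected false: processed, not stalling
          boolean jpegIsStalling = jpegStall > 0; // expected true: processed and stalling
          ```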
4470          </details>
4471        </entry>
4472        <entry name="maxNumOutputProcStalling" type="int32" visibility="java_public" synthetic="true"
4473               hwlevel="legacy">
4474          <description>The maximum numbers of different types of output streams
4475          that can be configured and used simultaneously by a camera device
4476          for any processed (and stalling) formats.
4477          </description>
4478          <range>
4479          &amp;gt;= 1
4480          </range>
4481          <details>
          This value contains the max number of output simultaneous
          streams for any processed (and stalling) formats.
4484
4485          This lists the upper bound of the number of output streams supported by
4486          the camera device. Using more streams simultaneously may require more hardware and
          CPU resources that will consume more power. The image format for this kind of output stream can
          be any supported non-`RAW` format provided by android.scaler.streamConfigurationMap.
4489
4490          A processed and stalling format is defined as any non-RAW format with a stallDurations
4491          &amp;gt; 0.  Typically only the {@link AIMAGE_FORMAT_JPEG} format is a
4492          stalling format.
4493
4494          For full guarantees, query {@link
4495          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a
4496          processed format -- it will return a non-0 value for a stalling stream.
4497
4498          LEGACY devices will support up to 1 processing/stalling stream.
4499          </details>
4500        </entry>
4501        <entry name="maxNumReprocessStreams" type="int32" visibility="system"
4502        deprecated="true" container="array">
4503          <array>
4504            <size>1</size>
4505          </array>
4506          <description>How many reprocessing streams of any type
4507          can be allocated at the same time.</description>
4508          <range>&amp;gt;= 0</range>
4509          <details>
4510          Only used by HAL2.x.
4511
4512          When set to 0, it means no reprocess stream is supported.
4513          </details>
4514          <tag id="HAL2" />
4515        </entry>
4516        <entry name="maxNumInputStreams" type="int32" visibility="java_public" hwlevel="full">
4517          <description>
4518          The maximum numbers of any type of input streams
4519          that can be configured and used simultaneously by a camera device.
4520          </description>
4521          <range>
4522          0 or 1.
4523          </range>
4524          <details>When set to 0, it means no input stream is supported.
4525
          The image format for an input stream can be any supported format returned by {@link
          android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}. When using an
          input stream, there must be at least one output stream configured to receive the
4529          reprocessed images.
4530
4531          When an input stream and some output streams are used in a reprocessing request,
4532          only the input buffer will be used to produce these output stream buffers, and a
4533          new sensor image will not be captured.
4534
          For example, for the Zero Shutter Lag (ZSL) still capture use case, the input
          stream image format will be PRIVATE, and the associated output stream image format
          should be JPEG.
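
          A minimal Java sketch (assuming `characteristics` was already obtained from
          CameraManager) of checking whether reprocessing input is available and which input
          formats it accepts:

          ```java
          // Sketch: check for reprocessing input support and list the input formats.
          Integer maxInputs = characteristics.get(
              CameraCharacteristics.REQUEST_MAX_NUM_INPUT_STREAMS);
          if (maxInputs != null) {
              if (maxInputs > 0) {
                  StreamConfigurationMap map = characteristics.get(
                      CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                  int[] inputFormats = map.getInputFormats();  // e.g. ImageFormat.PRIVATE
              }
          }
          ```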
4538          </details>
4539          <hal_details>
4540          For the reprocessing flow and controls, see
4541          hardware/libhardware/include/hardware/camera3.h Section 10 for more details.
4542          </hal_details>
4543          <tag id="REPROC" />
4544        </entry>
4545      </static>
4546      <dynamic>
4547        <entry name="frameCount" type="int32" visibility="hidden" deprecated="true">
4548          <description>A frame counter set by the framework. This value monotonically
4549          increases with every new result (that is, each new result has a unique
4550          frameCount value).</description>
4551          <units>count of frames</units>
4552          <range>&amp;gt; 0</range>
4553          <details>Reset on release()</details>
4554        </entry>
4555        <clone entry="android.request.id" kind="controls"></clone>
4556        <clone entry="android.request.metadataMode"
4557        kind="controls"></clone>
4558        <clone entry="android.request.outputStreams"
4559        kind="controls"></clone>
4560        <entry name="pipelineDepth" type="byte" visibility="public" hwlevel="legacy">
4561          <description>Specifies the number of pipeline stages the frame went
4562          through from when it was exposed to when the final completed result
4563          was available to the framework.</description>
4564          <range>&amp;lt;= android.request.pipelineMaxDepth</range>
          <details>Depending on what settings are used in the request, and
          what streams are configured, the data may undergo less processing,
          and some pipeline stages may be skipped.
4568
4569          See android.request.pipelineMaxDepth for more details.
4570          </details>
4571          <hal_details>
4572          This value must always represent the accurate count of how many
4573          pipeline stages were actually used.
4574          </hal_details>
4575        </entry>
4576      </dynamic>
4577      <static>
4578        <entry name="pipelineMaxDepth" type="byte" visibility="public" hwlevel="legacy">
          <description>Specifies the maximum number of pipeline stages a frame
          has to go through from when it's exposed to when it's available
          to the framework.</description>
4582          <details>A typical minimum value for this is 2 (one stage to expose,
4583          one stage to readout) from the sensor. The ISP then usually adds
4584          its own stages to do custom HW processing. Further stages may be
4585          added by SW processing.
4586
4587          Depending on what settings are used (e.g. YUV, JPEG) and what
4588          processing is enabled (e.g. face detection), the actual pipeline
4589          depth (specified by android.request.pipelineDepth) may be less than
4590          the max pipeline depth.
4591
4592          A pipeline depth of X stages is equivalent to a pipeline latency of
4593          X frame intervals.
4594
          This value will normally be 8 or less; however, for a high speed capture session,
          the max pipeline depth will be up to 8 x the size of the high speed capture request list.
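
          For example (a hedged Java sketch; `characteristics` and a received TotalCaptureResult
          named `result` are assumed to be in scope), a frame's latency can be estimated from its
          reported depth:

          ```java
          // Sketch: compare per-frame pipeline depth against the static maximum.
          byte maxDepth = characteristics.get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH);
          byte depth = result.get(CaptureResult.REQUEST_PIPELINE_DEPTH);
          long frameDurationNs = 33_333_333L;          // assumed ~30fps frame interval
          long latencyNs = depth * frameDurationNs;    // N stages ~= N frame intervals of latency
          ```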
4597          </details>
4598          <hal_details>
          This value should be 4 or less, except for high speed recording sessions, where the
          max batch sizes may be larger than 1.
4601          </hal_details>
4602        </entry>
4603        <entry name="partialResultCount" type="int32" visibility="public" optional="true">
4604          <description>Defines how many sub-components
4605          a result will be composed of.
4606          </description>
4607          <range>&amp;gt;= 1</range>
          <details>In order to combat pipeline latency, partial results
4609          may be delivered to the application layer from the camera device as
4610          soon as they are available.
4611
4612          Optional; defaults to 1. A value of 1 means that partial
4613          results are not supported, and only the final TotalCaptureResult will
4614          be produced by the camera device.
4615
4616          A typical use case for this might be: after requesting an
4617          auto-focus (AF) lock the new AF state might be available 50%
4618          of the way through the pipeline.  The camera device could
4619          then immediately dispatch this state via a partial result to
4620          the application, and the rest of the metadata via later
4621          partial results.
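
          A hedged Java sketch of consuming partial results through a
          CameraCaptureSession.CaptureCallback (only applicable when this count is greater
          than 1):

          ```java
          // Sketch: read early AF state from partial results; the final metadata
          // still arrives in onCaptureCompleted.
          CameraCaptureSession.CaptureCallback callback = new CameraCaptureSession.CaptureCallback() {
              @Override
              public void onCaptureProgressed(CameraCaptureSession session,
                      CaptureRequest request, CaptureResult partialResult) {
                  Integer afState = partialResult.get(CaptureResult.CONTROL_AF_STATE);
                  if (afState != null) {
                      // AF state arrived ahead of the final TotalCaptureResult.
                  }
              }

              @Override
              public void onCaptureCompleted(CameraCaptureSession session,
                      CaptureRequest request, TotalCaptureResult result) {
                  // Final, complete result for the capture.
              }
          };
          ```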
4622          </details>
4623        </entry>
4624        <entry name="availableCapabilities" type="byte" visibility="public"
4625          enum="true" container="array" hwlevel="legacy">
4626          <array>
4627            <size>n</size>
4628          </array>
4629          <enum>
4630            <value>BACKWARD_COMPATIBLE
4631              <notes>The minimal set of capabilities that every camera
4632                device (regardless of android.info.supportedHardwareLevel)
4633                supports.
4634
4635                This capability is listed by all normal devices, and
4636                indicates that the camera device has a feature set
4637                that's comparable to the baseline requirements for the
4638                older android.hardware.Camera API.
4639
4640                Devices with the DEPTH_OUTPUT capability might not list this
4641                capability, indicating that they support only depth measurement,
4642                not standard color output.
4643              </notes>
4644            </value>
4645            <value optional="true">MANUAL_SENSOR
4646              <notes>
              The camera device can be manually controlled (3A algorithms such
              as auto-exposure and auto-focus can be bypassed).
4649              The camera device supports basic manual control of the sensor image
4650              acquisition related stages. This means the following controls are
4651              guaranteed to be supported:
4652
4653              * Manual frame duration control
4654                  * android.sensor.frameDuration
4655                  * android.sensor.info.maxFrameDuration
4656              * Manual exposure control
4657                  * android.sensor.exposureTime
4658                  * android.sensor.info.exposureTimeRange
4659              * Manual sensitivity control
4660                  * android.sensor.sensitivity
4661                  * android.sensor.info.sensitivityRange
4662              * Manual lens control (if the lens is adjustable)
4663                  * android.lens.*
4664              * Manual flash control (if a flash unit is present)
4665                  * android.flash.*
4666              * Manual black level locking
4667                  * android.blackLevel.lock
4668              * Auto exposure lock
4669                  * android.control.aeLock
4670
4671              If any of the above 3A algorithms are enabled, then the camera
4672              device will accurately report the values applied by 3A in the
4673              result.
4674
4675              A given camera device may also support additional manual sensor controls,
4676              but this capability only covers the above list of controls.
4677
4678              If this is supported, android.scaler.streamConfigurationMap will
4679              additionally return a min frame duration that is greater than
4680              zero for each supported size-format combination.
4681              </notes>
4682            </value>
4683            <value optional="true">MANUAL_POST_PROCESSING
4684              <notes>
4685              The camera device post-processing stages can be manually controlled.
4686              The camera device supports basic manual control of the image post-processing
4687              stages. This means the following controls are guaranteed to be supported:
4688
4689              * Manual tonemap control
4690                  * android.tonemap.curve
4691                  * android.tonemap.mode
4692                  * android.tonemap.maxCurvePoints
4693                  * android.tonemap.gamma
4694                  * android.tonemap.presetCurve
4695
4696              * Manual white balance control
4697                  * android.colorCorrection.transform
4698                  * android.colorCorrection.gains
4699              * Manual lens shading map control
4700                    * android.shading.mode
4701                    * android.statistics.lensShadingMapMode
4702                    * android.statistics.lensShadingMap
4703                    * android.lens.info.shadingMapSize
4704              * Manual aberration correction control (if aberration correction is supported)
4705                    * android.colorCorrection.aberrationMode
4706                    * android.colorCorrection.availableAberrationModes
4707              * Auto white balance lock
4708                    * android.control.awbLock
4709
4710              If auto white balance is enabled, then the camera device
4711              will accurately report the values applied by AWB in the result.
4712
4713              A given camera device may also support additional post-processing
4714              controls, but this capability only covers the above list of controls.
4715              </notes>
4716            </value>
4717            <value optional="true">RAW
4718              <notes>
4719              The camera device supports outputting RAW buffers and
4720              metadata for interpreting them.
4721
4722              Devices supporting the RAW capability allow both for
4723              saving DNG files, and for direct application processing of
4724              raw sensor images.
4725
4726              * RAW_SENSOR is supported as an output format.
4727              * The maximum available resolution for RAW_SENSOR streams
4728                will match either the value in
4729                android.sensor.info.pixelArraySize or
4730                android.sensor.info.preCorrectionActiveArraySize.
4731              * All DNG-related optional metadata entries are provided
4732                by the camera device.
4733              </notes>
4734            </value>
4735            <value optional="true" ndk_hidden="true">PRIVATE_REPROCESSING
4736              <notes>
4737              The camera device supports the Zero Shutter Lag reprocessing use case.
4738
4739              * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`.
4740              * {@link android.graphics.ImageFormat#PRIVATE} is supported as an output/input format,
4741                that is, {@link android.graphics.ImageFormat#PRIVATE} is included in the lists of
4742                formats returned by {@link
4743                android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and {@link
4744                android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}.
4745              * {@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput}
                returns a non-empty int[] for each supported input format returned by {@link
4747                android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}.
4748              * Each size returned by {@link
4749                android.hardware.camera2.params.StreamConfigurationMap#getInputSizes
4750                getInputSizes(ImageFormat.PRIVATE)} is also included in {@link
4751                android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes
4752                getOutputSizes(ImageFormat.PRIVATE)}
4753              * Using {@link android.graphics.ImageFormat#PRIVATE} does not cause a frame rate drop
4754                relative to the sensor's maximum capture rate (at that resolution).
4755              * {@link android.graphics.ImageFormat#PRIVATE} will be reprocessable into both
4756                {@link android.graphics.ImageFormat#YUV_420_888} and
4757                {@link android.graphics.ImageFormat#JPEG} formats.
4758              * The maximum available resolution for PRIVATE streams
4759                (both input/output) will match the maximum available
4760                resolution of JPEG streams.
4761              * Static metadata android.reprocess.maxCaptureStall.
              * Only the below controls are effective for reprocessing requests and
                will be present in capture results; other controls in reprocess
                requests will be ignored by the camera device.
4765                    * android.jpeg.*
4766                    * android.noiseReduction.mode
4767                    * android.edge.mode
4768              * android.noiseReduction.availableNoiseReductionModes and
4769                android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode.
4770              </notes>
4771            </value>
4772            <value optional="true">READ_SENSOR_SETTINGS
4773              <notes>
4774              The camera device supports accurately reporting the sensor settings for many of
4775              the sensor controls while the built-in 3A algorithm is running.  This allows
4776              reporting of sensor settings even when these settings cannot be manually changed.
4777
4778              The values reported for the following controls are guaranteed to be available
4779              in the CaptureResult, including when 3A is enabled:
4780
4781              * Exposure control
4782                  * android.sensor.exposureTime
4783              * Sensitivity control
4784                  * android.sensor.sensitivity
4785              * Lens controls (if the lens is adjustable)
4786                  * android.lens.focusDistance
4787                  * android.lens.aperture
4788
4789              This capability is a subset of the MANUAL_SENSOR control capability, and will
4790              always be included if the MANUAL_SENSOR capability is available.
4791              </notes>
4792            </value>
4793            <value optional="true">BURST_CAPTURE
4794              <notes>
4795              The camera device supports capturing high-resolution images at &gt;= 20 frames per
4796              second, in at least the uncompressed YUV format, when post-processing settings are set
4797              to FAST. Additionally, maximum-resolution images can be captured at &gt;= 10 frames
4798              per second.  Here, 'high resolution' means at least 8 megapixels, or the maximum
4799              resolution of the device, whichever is smaller.
4800
4801              More specifically, this means that at least one output {@link
4802              AIMAGE_FORMAT_YUV_420_888} size listed in
4803              {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS} is larger or equal to the
4804              'high resolution' defined above, and can be captured at at least 20 fps.
4805              For the largest {@link AIMAGE_FORMAT_YUV_420_888} size listed in
              {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}, the camera device can capture this
              size at at least 10 frames per second.
4808              Also the android.control.aeAvailableTargetFpsRanges entry lists at least one FPS range
4809              where the minimum FPS is &gt;= 1 / minimumFrameDuration for the largest YUV_420_888 size.
4810
              If the device supports {@link AIMAGE_FORMAT_RAW10} or {@link
              AIMAGE_FORMAT_RAW12}, then those can also be captured at the same rate
              as the maximum-size YUV_420_888 resolution.
4814
              In addition, the android.sync.maxLatency field is guaranteed to have a value between 0
4816              and 4, inclusive. android.control.aeLockAvailable and android.control.awbLockAvailable
4817              are also guaranteed to be `true` so burst capture with these two locks ON yields
4818              consistent image output.
4819              </notes>
4820            </value>
4821            <value optional="true" ndk_hidden="true">YUV_REPROCESSING
4822              <notes>
              The camera device supports the YUV_420_888 reprocessing use case, similar to
              PRIVATE_REPROCESSING. This capability requires the camera device to support the
              following:
4826
4827              * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`.
4828              * {@link android.graphics.ImageFormat#YUV_420_888} is supported as an output/input format, that is,
4829                YUV_420_888 is included in the lists of formats returned by
4830                {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and
4831                {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}.
4832              * {@link
4833                android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput}
4834                returns non-empty int[] for each supported input format returned by {@link
4835                android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}.
4836              * Each size returned by {@link
4837                android.hardware.camera2.params.StreamConfigurationMap#getInputSizes
4838                getInputSizes(YUV_420_888)} is also included in {@link
4839                android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes
4840                getOutputSizes(YUV_420_888)}
4841              * Using {@link android.graphics.ImageFormat#YUV_420_888} does not cause a frame rate drop
4842                relative to the sensor's maximum capture rate (at that resolution).
4843              * {@link android.graphics.ImageFormat#YUV_420_888} will be reprocessable into both
4844                {@link android.graphics.ImageFormat#YUV_420_888} and {@link
4845                android.graphics.ImageFormat#JPEG} formats.
4846              * The maximum available resolution for {@link
4847                android.graphics.ImageFormat#YUV_420_888} streams (both input/output) will match the
4848                maximum available resolution of {@link android.graphics.ImageFormat#JPEG} streams.
4849              * Static metadata android.reprocess.maxCaptureStall.
4850              * Only the below controls are effective for reprocessing requests and will be present
4851                in capture results. The reprocess requests are from the original capture results that
4852                are associated with the intermediate {@link android.graphics.ImageFormat#YUV_420_888}
4853                output buffers.  All other controls in the reprocess requests will be ignored by the
4854                camera device.
4855                    * android.jpeg.*
4856                    * android.noiseReduction.mode
4857                    * android.edge.mode
4858                    * android.reprocess.effectiveExposureFactor
4859              * android.noiseReduction.availableNoiseReductionModes and
4860                android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode.
4861              </notes>
4862            </value>
4863            <value optional="true">DEPTH_OUTPUT
4864              <notes>
4865              The camera device can produce depth measurements from its field of view.
4866
4867              This capability requires the camera device to support the following:
4868
4869              * {@link AIMAGE_FORMAT_DEPTH16} is supported as an output format.
4870              * {@link AIMAGE_FORMAT_DEPTH_POINT_CLOUD} is optionally supported as an
4871                output format.
4872              * This camera device, and all camera devices with the same android.lens.facing,
4873                will list the following calibration entries in {@link ACameraMetadata} from both
4874                {@link ACameraManager_getCameraCharacteristics} and
4875                {@link ACameraCaptureSession_captureCallback_result}:
4876                  - android.lens.poseTranslation
4877                  - android.lens.poseRotation
4878                  - android.lens.intrinsicCalibration
4879                  - android.lens.radialDistortion
4880              * The android.depth.depthIsExclusive entry is listed by this device.
4881              * A LIMITED camera with only the DEPTH_OUTPUT capability does not have to support
4882                normal YUV_420_888, JPEG, and PRIV-format outputs. It only has to support the DEPTH16
4883                format.
4884
4885              Generally, depth output operates at a slower frame rate than standard color capture,
4886              so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that
4887              should be accounted for (see
4888              {@link ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS}).
4889              On a device that supports both depth and color-based output, to enable smooth preview,
4890              using a repeating burst is recommended, where a depth-output target is only included
4891              once every N frames, where N is the ratio between preview output rate and depth output
4892              rate, including depth stall time.
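
              As an illustrative (not normative) Java sketch of such a burst, assuming
              `previewSurface` and `depthSurface` are already configured and N == 4:

              ```java
              // Sketch: include the depth target in only 1 of every 4 preview frames.
              void startDepthPreviewBurst(CameraDevice device, CameraCaptureSession session)
                      throws CameraAccessException {
                  CaptureRequest.Builder builder =
                      device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                  builder.addTarget(previewSurface);
                  CaptureRequest previewOnly = builder.build();
                  builder.addTarget(depthSurface);
                  CaptureRequest previewPlusDepth = builder.build();
                  session.setRepeatingBurst(
                      Arrays.asList(previewPlusDepth, previewOnly, previewOnly, previewOnly),
                      null, null);
              }
              ```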
4893              </notes>
4894            </value>
4895            <value optional="true" ndk_hidden="true">CONSTRAINED_HIGH_SPEED_VIDEO
4896              <notes>
              The device supports the constrained high speed video recording (frame rate &gt;= 120fps)
              use case. The camera device will support the high speed capture session created by
4899              {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}, which
4900              only accepts high speed request lists created by
4901              {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
4902
4903              A camera device can still support high speed video streaming by advertising the high speed
4904              FPS ranges in android.control.aeAvailableTargetFpsRanges. For this case, all normal
              per-frame capture request control and synchronization requirements will apply to
              the high speed fps ranges, the same as for all other fps ranges. This capability describes
4907              the capability of a specialized operating mode with many limitations (see below), which
4908              is only targeted at high speed video recording.
4909
4910              The supported high speed video sizes and fps ranges are specified in
4911              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.
4912              To get desired output frame rates, the application is only allowed to select video size
4913              and FPS range combinations provided by
4914              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}.
4915              The fps range can be controlled via android.control.aeTargetFpsRange.
4916
4917              In this capability, the camera device will override aeMode, awbMode, and afMode to
4918              ON, AUTO, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
4919              controls will be overridden to be FAST. Therefore, no manual control of capture
4920              and post-processing parameters is possible. All other controls operate the
4921              same as when android.control.mode == AUTO. This means that all other
4922              android.control.* fields continue to work, such as
4923
4924              * android.control.aeTargetFpsRange
4925              * android.control.aeExposureCompensation
4926              * android.control.aeLock
4927              * android.control.awbLock
4928              * android.control.effectMode
4929              * android.control.aeRegions
4930              * android.control.afRegions
4931              * android.control.awbRegions
4932              * android.control.afTrigger
4933              * android.control.aePrecaptureTrigger
4934
4935              Outside of android.control.*, the following controls will work:
4936
4937              * android.flash.mode (TORCH mode only, automatic flash for still capture will not
4938              work since aeMode is ON)
4939              * android.lens.opticalStabilizationMode (if it is supported)
4940              * android.scaler.cropRegion
4941              * android.statistics.faceDetectMode (if it is supported)
4942
              For the high speed recording use case, the actual maximum supported frame rate may
              be lower than what the camera can output, depending on the destination Surfaces for
              the image data. For example, if the destination surface is from a video encoder,
              the application needs to check if the video encoder is capable of supporting the
              high frame rate for a given video size, or it will end up with a lower recording
              frame rate. If the destination surface is from a preview window, the actual preview frame
              rate will be bounded by the screen refresh rate.
4950
              The camera device will only support up to 2 simultaneous high speed output surfaces
              (preview and recording surfaces) in this mode. The above controls will be effective
              only if all of the below conditions are true:
4954
4955              * The application creates a camera capture session with no more than 2 surfaces via
4956              {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}. The
4957              targeted surfaces must be preview surface (either from
4958              {@link android.view.SurfaceView} or {@link android.graphics.SurfaceTexture}) or
              recording surface (either from {@link android.media.MediaRecorder#getSurface} or
4960              {@link android.media.MediaCodec#createInputSurface}).
4961              * The stream sizes are selected from the sizes reported by
4962              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}.
4963              * The FPS ranges are selected from
4964              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.
4965
              When the above conditions are NOT satisfied,
4967              {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
4968              will fail.
4969
4970              Switching to a FPS range that has different maximum FPS may trigger some camera device
4971              reconfigurations, which may introduce extra latency. It is recommended that
4972              the application avoids unnecessary maximum target FPS changes as much as possible
4973              during high speed streaming.
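
              A hedged Java sketch of the session flow described above (`previewSurface`,
              `recorderSurface`, and an `fpsRange` selected from
              getHighSpeedVideoFpsRangesFor() are assumed to be prepared elsewhere):

              ```java
              // Sketch: constrained high speed recording flow; error handling trimmed.
              void startHighSpeed(final CameraDevice device) throws CameraAccessException {
                  device.createConstrainedHighSpeedCaptureSession(
                      Arrays.asList(previewSurface, recorderSurface),
                      new CameraCaptureSession.StateCallback() {
                          @Override
                          public void onConfigured(CameraCaptureSession session) {
                              try {
                                  CameraConstrainedHighSpeedCaptureSession hsSession =
                                      (CameraConstrainedHighSpeedCaptureSession) session;
                                  CaptureRequest.Builder builder =
                                      device.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
                                  builder.addTarget(previewSurface);
                                  builder.addTarget(recorderSurface);
                                  builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
                                  hsSession.setRepeatingBurst(
                                      hsSession.createHighSpeedRequestList(builder.build()),
                                      null, null);
                              } catch (CameraAccessException e) {
                                  // handle the error
                              }
                          }
                          @Override
                          public void onConfigureFailed(CameraCaptureSession session) { }
                      }, null);
              }
              ```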
4974              </notes>
4975            </value>
4976          </enum>
4977          <description>List of capabilities that this camera device
4978          advertises as fully supporting.</description>
4979          <details>
4980          A capability is a contract that the camera device makes in order
4981          to be able to satisfy one or more use cases.
4982
          Listing a capability guarantees that the whole set of features
          required to support a common use case will all be available.
4985
4986          Using a subset of the functionality provided by an unsupported
4987          capability may be possible on a specific camera device implementation;
          to do this, query each of android.request.availableRequestKeys,
          android.request.availableResultKeys, and
          android.request.availableCharacteristicsKeys.
4991
4992          The following capabilities are guaranteed to be available on
4993          android.info.supportedHardwareLevel `==` FULL devices:
4994
4995          * MANUAL_SENSOR
4996          * MANUAL_POST_PROCESSING
4997
4998          Other capabilities may be available on either FULL or LIMITED
4999          devices, but the application should query this key to be sure.
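
          A minimal Java sketch of that query (`characteristics` assumed available), here
          checking for MANUAL_SENSOR:

          ```java
          // Sketch: test whether a capability is advertised by this camera device.
          int[] caps = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
          boolean hasManualSensor = false;
          for (int cap : caps) {
              if (cap == CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) {
                  hasManualSensor = true;
              }
          }
          ```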
5000          </details>
5001          <hal_details>
5002          Additional constraint details per-capability will be available
5003          in the Compatibility Test Suite.
5004
5005          Minimum baseline requirements required for the
5006          BACKWARD_COMPATIBLE capability are not explicitly listed.
5007          Instead refer to "BC" tags and the camera CTS tests in the
5008          android.hardware.camera2.cts package.
5009
5010          Listed controls that can be either request or result (e.g.
5011          android.sensor.exposureTime) must be available both in the
5012          request and the result in order to be considered to be
5013          capability-compliant.
5014
5015          For example, if the HAL claims to support MANUAL control,
5016          then exposure time must be configurable via the request _and_
5017          the actual exposure applied must be available via
5018          the result.
5019
5020          If MANUAL_SENSOR is omitted, the HAL may choose to omit the
5021          android.scaler.availableMinFrameDurations static property entirely.
5022
5023          For PRIVATE_REPROCESSING and YUV_REPROCESSING capabilities, see
5024          hardware/libhardware/include/hardware/camera3.h Section 10 for more information.
5025
5026          Devices that support the MANUAL_SENSOR capability must support the
5027          CAMERA3_TEMPLATE_MANUAL template defined in camera3.h.
5028
5029          Devices that support the PRIVATE_REPROCESSING capability or the
5030          YUV_REPROCESSING capability must support the
5031          CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template defined in camera3.h.
5032
5033          For DEPTH_OUTPUT, the depth-format keys
5034          android.depth.availableDepthStreamConfigurations,
5035          android.depth.availableDepthMinFrameDurations,
5036          android.depth.availableDepthStallDurations must be available, in
5037          addition to the other keys explicitly mentioned in the DEPTH_OUTPUT
5038          enum notes. The entry android.depth.maxDepthSamples must be available
5039          if the DEPTH_POINT_CLOUD format is supported (HAL pixel format BLOB, dataspace
5040          DEPTH).
5041          </hal_details>
5042        </entry>
5043        <entry name="availableRequestKeys" type="int32" visibility="ndk_public"
5044               container="array" hwlevel="legacy">
5045          <array>
5046            <size>n</size>
5047          </array>
5048          <description>A list of all keys that the camera device has available
5049          to use with {@link ACaptureRequest}.</description>
5050
5051          <details>Attempting to set a key into a CaptureRequest that is not
5052          listed here will result in an invalid request and will be rejected
5053          by the camera device.
5054
5055          This field can be used to query the feature set of a camera device
5056          at a more granular level than capabilities. This is especially
5057          important for optional keys that are not listed under any capability
5058          in android.request.availableCapabilities.
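
          For example, a Java application might guard an optional key as in this sketch
          (`characteristics` and a CaptureRequest.Builder named `builder` are assumed;
          TONEMAP_GAMMA is used purely as an example of an optional key):

          ```java
          // Sketch: only set an optional key if the device actually lists it.
          if (characteristics.getAvailableCaptureRequestKeys()
                  .contains(CaptureRequest.TONEMAP_GAMMA)) {
              builder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f);
          }
          ```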
5059          </details>
5060          <hal_details>
5061          Vendor tags must not be listed here. Use the vendor tag metadata
5062          extensions C api instead (refer to camera3.h for more details).
5063
5064          Setting/getting vendor tags will be checked against the metadata
5065          vendor extensions API and not against this field.
5066
5067          The HAL must not consume any request tags that are not listed either
5068          here or in the vendor tag list.
5069
5070          The public camera2 API will always make the vendor tags visible
5071          via
5072          {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureRequestKeys}.
5073          </hal_details>
5074        </entry>
5075        <entry name="availableResultKeys" type="int32" visibility="ndk_public"
5076               container="array" hwlevel="legacy">
5077          <array>
5078            <size>n</size>
5079          </array>
5080          <description>A list of all keys that the camera device has available
5081          to query with {@link ACameraMetadata} from
5082          {@link ACameraCaptureSession_captureCallback_result}.</description>
5083
5084          <details>Attempting to get a key from a CaptureResult that is not
5085          listed here will always return a `null` value. Getting a key from
5086          a CaptureResult that is listed here will generally never return a `null`
5087          value.
5088
5089          The following keys may return `null` unless they are enabled:
5090
5091          * android.statistics.lensShadingMap (non-null iff android.statistics.lensShadingMapMode == ON)
5092
5093          (Those sometimes-null keys will nevertheless be listed here
5094          if they are available.)
5095
5096          This field can be used to query the feature set of a camera device
5097          at a more granular level than capabilities. This is especially
5098          important for optional keys that are not listed under any capability
5099          in android.request.availableCapabilities.
5100          </details>
5101          <hal_details>
5102          Tags listed here must always have an entry in the result metadata,
5103          even if that size is 0 elements. Only array-type tags (e.g. lists,
5104          matrices, strings) are allowed to have 0 elements.
5105
5106          Vendor tags must not be listed here. Use the vendor tag metadata
5107          extensions C api instead (refer to camera3.h for more details).
5108
5109          Setting/getting vendor tags will be checked against the metadata
5110          vendor extensions API and not against this field.
5111
5112          The HAL must not produce any result tags that are not listed either
5113          here or in the vendor tag list.
5114
5115          The public camera2 API will always make the vendor tags visible via {@link
5116          android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys}.
5117          </hal_details>
5118        </entry>
5119        <entry name="availableCharacteristicsKeys" type="int32" visibility="ndk_public"
5120               container="array" hwlevel="legacy">
5121          <array>
5122            <size>n</size>
5123          </array>
5124          <description>A list of all keys that the camera device has available
5125          to query with {@link ACameraMetadata} from
5126          {@link ACameraManager_getCameraCharacteristics}.</description>
5127          <details>This entry follows the same rules as
5128          android.request.availableResultKeys (except that it applies for
5129          CameraCharacteristics instead of CaptureResult). See above for more
5130          details.
5131          </details>
5132          <hal_details>
5133          Keys listed here must always have an entry in the static info metadata,
5134          even if that size is 0 elements. Only array-type tags (e.g. lists,
5135          matrices, strings) are allowed to have 0 elements.
5136
5137          Vendor tags must not be listed here. Use the vendor tag metadata
5138          extensions C api instead (refer to camera3.h for more details).
5139
5140          Setting/getting vendor tags will be checked against the metadata
5141          vendor extensions API and not against this field.
5142
5143          The HAL must not have any tags in its static info that are not listed
5144          either here or in the vendor tag list.
5145
5146          The public camera2 API will always make the vendor tags visible
5147          via {@link android.hardware.camera2.CameraCharacteristics#getKeys}.
5148          </hal_details>
5149        </entry>
5150      </static>
5151    </section>
5152    <section name="scaler">
5153      <controls>
5154        <entry name="cropRegion" type="int32" visibility="public"
5155               container="array" typedef="rectangle" hwlevel="legacy">
5156          <array>
5157            <size>4</size>
5158          </array>
5159          <description>The desired region of the sensor to read out for this capture.</description>
5160          <units>Pixel coordinates relative to
5161          android.sensor.info.activeArraySize</units>
5162          <details>
5163            This control can be used to implement digital zoom.
5164
5165            The data representation is int[4], which maps to (left, top, width, height).
5166
5167            The crop region coordinate system is based off
5168            android.sensor.info.activeArraySize, with `(0, 0)` being the
5169            top-left corner of the sensor active array.
5170
5171            Output streams use this rectangle to produce their output,
5172            cropping to a smaller region if necessary to maintain the
5173            stream's aspect ratio, then scaling the sensor input to
5174            match the output's configured resolution.
5175
5176            The crop region is applied after the RAW to other color
5177            space (e.g. YUV) conversion. Since raw streams
5178            (e.g. RAW16) don't have the conversion stage, they are not
5179            croppable. The crop region will be ignored by raw streams.
5180
5181            For non-raw streams, any additional per-stream cropping will
5182            be done to maximize the final pixel area of the stream.
5183
5184            For example, if the crop region is set to a 4:3 aspect
5185            ratio, then 4:3 streams will use the exact crop
5186            region. 16:9 streams will further crop vertically
5187            (letterbox).
5188
5189            Conversely, if the crop region is set to a 16:9, then 4:3
5190            outputs will crop horizontally (pillarbox), and 16:9
5191            streams will match exactly. These additional crops will
5192            be centered within the crop region.
5193
5194            The width and height of the crop region cannot
5195            be set to be smaller than
5196            `floor( activeArraySize.width / android.scaler.availableMaxDigitalZoom )` and
5197            `floor( activeArraySize.height / android.scaler.availableMaxDigitalZoom )`, respectively.
5198
5199            The camera device may adjust the crop region to account
5200            for rounding and other hardware requirements; the final
5201            crop region used will be included in the output capture
5202            result.
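
            As an illustration, here is a minimal C sketch (hypothetical names, not part of any
            camera API) that computes a centered crop region for a given zoom factor while
            respecting the minimum size described above:

                typedef struct { int32_t left, top, width, height; } rect_t;

                // active:  android.sensor.info.activeArraySize (only width/height are used)
                // maxZoom: android.scaler.availableMaxDigitalZoom
                // zoom:    requested zoom factor, 1.0 &lt;= zoom &lt;= maxZoom
                static rect_t centered_crop_for_zoom(rect_t active, float maxZoom, float zoom) {
                    int32_t minW = (int32_t) (active.width  / maxZoom);  // floor
                    int32_t minH = (int32_t) (active.height / maxZoom);  // floor
                    int32_t w = (int32_t) (active.width  / zoom);
                    int32_t h = (int32_t) (active.height / zoom);
                    if (w &lt; minW) w = minW;  // cannot crop tighter than max digital zoom allows
                    if (h &lt; minH) h = minH;
                    rect_t crop = { (active.width - w) / 2, (active.height - h) / 2, w, h };
                    return crop;
                }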
5203          </details>
5204          <hal_details>
            The output streams must maintain square pixels at all
            times, no matter what the relative aspect ratios of the
            crop region and the stream are. Negative values for the
            corners are allowed for raw output if the full pixel array is
            larger than the active pixel array. Width and height may be
            rounded up to the nearest supportable width, especially
            for raw output, where only a few fixed scales may be
            possible.
5213
            For a configured set of output streams, if the sensor output is cropped to a smaller
            size than the active array size, the HAL must follow the cropping rules below (a
            sketch follows these rules):

            * The HAL must handle the cropRegion as if the sensor crop size were the effective
            active array size. More specifically, the HAL must transform the requested cropRegion
            from android.sensor.info.activeArraySize to the sensor cropped pixel area size as
            follows:
                1. Translate the requested cropRegion w.r.t. the top-left corner of the sensor
                cropped pixel area by (tx, ty),
                where `ty = sensorCrop.top * (sensorCrop.height / activeArraySize.height)`
                and `tx = sensorCrop.left * (sensorCrop.width / activeArraySize.width)`. The
                coordinate (sensorCrop.top, sensorCrop.left) is given relative to
                android.sensor.info.activeArraySize.
                2. Scale the width and height of the requested cropRegion by the factors
                sensorCrop.width/activeArraySize.width and sensorCrop.height/activeArraySize.height,
                respectively.
            Once this new cropRegion is calculated, the HAL must use it to crop the image
            with regard to the sensor crop size (the effective active array size). The HAL must
            still follow the general cropping rule for this new cropRegion and the effective
            active array size.

            * The HAL must report the cropRegion with regard to android.sensor.info.activeArraySize.
            The HAL must convert the new cropRegion generated above back into full active array
            coordinates. The reported cropRegion may differ slightly from the requested cropRegion,
            since the HAL may adjust the crop region to account for rounding, conversion error, or
            other hardware limitations.
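
            A literal C transcription of steps 1 and 2 above, for illustration only; the `rect_t`
            type and variable names are hypothetical, not HAL definitions:

                typedef struct { int32_t left, top, width, height; } rect_t;

                // request:     requested cropRegion, in activeArraySize coordinates
                // sensorCrop:  sensor cropped pixel area, in activeArraySize coordinates
                // activeArray: android.sensor.info.activeArraySize
                static rect_t crop_to_sensor_area(rect_t request, rect_t sensorCrop,
                                                  rect_t activeArray) {
                    float sx = (float) sensorCrop.width  / (float) activeArray.width;
                    float sy = (float) sensorCrop.height / (float) activeArray.height;
                    // Step 1: translate by (tx, ty) as defined above.
                    int32_t tx = (int32_t) (sensorCrop.left * sx);
                    int32_t ty = (int32_t) (sensorCrop.top  * sy);
                    // Step 2: scale the width and height by the same factors.
                    rect_t out;
                    out.left   = request.left - tx;
                    out.top    = request.top  - ty;
                    out.width  = (int32_t) (request.width  * sx);
                    out.height = (int32_t) (request.height * sy);
                    return out;
                }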
5239
5240            HAL2.x uses only (x, y, width)
5241          </hal_details>
5242          <tag id="BC" />
5243        </entry>
5244      </controls>
5245      <static>
5246        <entry name="availableFormats" type="int32"
5247        visibility="hidden" deprecated="true" enum="true"
5248        container="array" typedef="imageFormat">
5249          <array>
5250            <size>n</size>
5251          </array>
5252          <enum>
5253            <value optional="true" id="0x20">RAW16
5254              <notes>
5255              RAW16 is a standard, cross-platform format for raw image
5256              buffers with 16-bit pixels.
5257
5258              Buffers of this format are typically expected to have a
5259              Bayer Color Filter Array (CFA) layout, which is given in
5260              android.sensor.info.colorFilterArrangement. Sensors with
5261              CFAs that are not representable by a format in
5262              android.sensor.info.colorFilterArrangement should not
5263              use this format.
5264
5265              Buffers of this format will also follow the constraints given for
5266              RAW_OPAQUE buffers, but with relaxed performance constraints.
5267
5268              This format is intended to give users access to the full contents
5269              of the buffers coming directly from the image sensor prior to any
5270              cropping or scaling operations, and all coordinate systems for
5271              metadata used for this format are relative to the size of the
5272              active region of the image sensor before any geometric distortion
5273              correction has been applied (i.e.
5274              android.sensor.info.preCorrectionActiveArraySize). Supported
5275              dimensions for this format are limited to the full dimensions of
5276              the sensor (e.g. either android.sensor.info.pixelArraySize or
5277              android.sensor.info.preCorrectionActiveArraySize will be the
5278              only supported output size).
5279
5280              See android.scaler.availableInputOutputFormatsMap for
5281              the full set of performance guarantees.
5282              </notes>
5283            </value>
5284            <value optional="true" id="0x24">RAW_OPAQUE
5285              <notes>
5286              RAW_OPAQUE (or
5287              {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE}
5288              as referred in public API) is a format for raw image buffers
5289              coming from an image sensor.
5290
5291              The actual structure of buffers of this format is
5292              platform-specific, but must follow several constraints:
5293
5294              1. No image post-processing operations may have been applied to
5295              buffers of this type. These buffers contain raw image data coming
5296              directly from the image sensor.
5297              1. If a buffer of this format is passed to the camera device for
5298              reprocessing, the resulting images will be identical to the images
5299              produced if the buffer had come directly from the sensor and was
5300              processed with the same settings.
5301
5302              The intended use for this format is to allow access to the native
5303              raw format buffers coming directly from the camera sensor without
5304              any additional conversions or decrease in framerate.
5305
5306              See android.scaler.availableInputOutputFormatsMap for the full set of
5307              performance guarantees.
5308              </notes>
5309            </value>
5310            <value optional="true" id="0x32315659">YV12
5311              <notes>YCrCb 4:2:0 Planar</notes>
5312            </value>
5313            <value optional="true" id="0x11">YCrCb_420_SP
5314              <notes>NV21</notes>
5315            </value>
5316            <value id="0x22">IMPLEMENTATION_DEFINED
5317              <notes>System internal format, not application-accessible</notes>
5318            </value>
5319            <value id="0x23">YCbCr_420_888
5320              <notes>Flexible YUV420 Format</notes>
5321            </value>
5322            <value id="0x21">BLOB
5323              <notes>JPEG format</notes>
5324            </value>
5325          </enum>
5326          <description>The list of image formats that are supported by this
5327          camera device for output streams.</description>
5328          <details>
5329          All camera devices will support JPEG and YUV_420_888 formats.
5330
          When set to YUV_420_888, the application can access the YUV420 data directly.
5332          </details>
5333          <hal_details>
5334          These format values are from HAL_PIXEL_FORMAT_* in
5335          system/core/include/system/graphics.h.
5336
5337          When IMPLEMENTATION_DEFINED is used, the platform
5338          gralloc module will select a format based on the usage flags provided
5339          by the camera HAL device and the other endpoint of the stream. It is
5340          usually used by preview and recording streams, where the application doesn't
          need to access the image data.
5342
5343          YCbCr_420_888 format must be supported by the HAL. When an image stream
5344          needs CPU/application direct access, this format will be used.
5345
5346          The BLOB format must be supported by the HAL. This is used for the JPEG stream.
5347
5348          A RAW_OPAQUE buffer should contain only pixel data. It is strongly
5349          recommended that any information used by the camera device when
5350          processing images is fully expressed by the result metadata
5351          for that image buffer.
5352          </hal_details>
5353          <tag id="BC" />
5354        </entry>
5355        <entry name="availableJpegMinDurations" type="int64" visibility="hidden" deprecated="true"
5356        container="array">
5357          <array>
5358            <size>n</size>
5359          </array>
5360          <description>The minimum frame duration that is supported
5361          for each resolution in android.scaler.availableJpegSizes.
5362          </description>
5363          <units>Nanoseconds</units>
5364          <range>TODO: Remove property.</range>
5365          <details>
5366          This corresponds to the minimum steady-state frame duration when only
5367          that JPEG stream is active and captured in a burst, with all
5368          processing (typically in android.*.mode) set to FAST.
5369
5370          When multiple streams are configured, the minimum
5371          frame duration will be &amp;gt;= max(individual stream min
5372          durations)</details>
5373          <tag id="BC" />
5374        </entry>
5375        <entry name="availableJpegSizes" type="int32" visibility="hidden"
5376        deprecated="true" container="array" typedef="size">
5377          <array>
5378            <size>n</size>
5379            <size>2</size>
5380          </array>
5381          <description>The JPEG resolutions that are supported by this camera device.</description>
5382          <range>TODO: Remove property.</range>
5383          <details>
5384          The resolutions are listed as `(width, height)` pairs. All camera devices will support
5385          sensor maximum resolution (defined by android.sensor.info.activeArraySize).
5386          </details>
5387          <hal_details>
5388          The HAL must include sensor maximum resolution
5389          (defined by android.sensor.info.activeArraySize),
5390          and should include half/quarter of sensor maximum resolution.
5391          </hal_details>
5392          <tag id="BC" />
5393        </entry>
5394        <entry name="availableMaxDigitalZoom" type="float" visibility="public"
5395              hwlevel="legacy">
5396          <description>The maximum ratio between both active area width
5397          and crop region width, and active area height and
5398          crop region height, for android.scaler.cropRegion.
5399          </description>
5400          <units>Zoom scale factor</units>
5401          <range>&amp;gt;=1</range>
5402          <details>
5403          This represents the maximum amount of zooming possible by
5404          the camera device, or equivalently, the minimum cropping
5405          window size.
5406
5407          Crop regions that have a width or height that is smaller
5408          than this ratio allows will be rounded up to the minimum
5409          allowed size by the camera device.
5410          </details>
5411          <tag id="BC" />
5412        </entry>
5413        <entry name="availableProcessedMinDurations" type="int64" visibility="hidden" deprecated="true"
5414        container="array">
5415          <array>
5416            <size>n</size>
5417          </array>
5418          <description>For each available processed output size (defined in
5419          android.scaler.availableProcessedSizes), this property lists the
5420          minimum supportable frame duration for that size.
5421          </description>
5422          <units>Nanoseconds</units>
5423          <details>
5424          This should correspond to the frame duration when only that processed
5425          stream is active, with all processing (typically in android.*.mode)
5426          set to FAST.
5427
5428          When multiple streams are configured, the minimum frame duration will
5429          be &amp;gt;= max(individual stream min durations).
5430          </details>
5431          <tag id="BC" />
5432        </entry>
5433        <entry name="availableProcessedSizes" type="int32" visibility="hidden"
5434        deprecated="true" container="array" typedef="size">
5435          <array>
5436            <size>n</size>
5437            <size>2</size>
5438          </array>
5439          <description>The resolutions available for use with
5440          processed output streams, such as YV12, NV12, and
5441          platform opaque YUV/RGB streams to the GPU or video
5442          encoders.</description>
5443          <details>
5444          The resolutions are listed as `(width, height)` pairs.
5445
5446          For a given use case, the actual maximum supported resolution
5447          may be lower than what is listed here, depending on the destination
5448          Surface for the image data. For example, for recording video,
5449          the video encoder chosen may have a maximum size limit (e.g. 1080p)
5450          smaller than what the camera (e.g. maximum resolution is 3264x2448)
5451          can provide.
5452
5453          Please reference the documentation for the image data destination to
5454          check if it limits the maximum size for image data.
5455          </details>
5456          <hal_details>
5457          For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
5458          the HAL must include all JPEG sizes listed in android.scaler.availableJpegSizes
5459          and each below resolution if it is smaller than or equal to the sensor
5460          maximum resolution (if they are not listed in JPEG sizes already):
5461
5462          * 240p (320 x 240)
5463          * 480p (640 x 480)
5464          * 720p (1280 x 720)
5465          * 1080p (1920 x 1080)
5466
5467          For LIMITED capability devices (`android.info.supportedHardwareLevel == LIMITED`),
          the HAL only has to list up to the maximum video size supported by the device.
5469          </hal_details>
5470          <tag id="BC" />
5471        </entry>
5472        <entry name="availableRawMinDurations" type="int64" deprecated="true"
5473        container="array">
5474          <array>
5475            <size>n</size>
5476          </array>
5477          <description>
5478          For each available raw output size (defined in
5479          android.scaler.availableRawSizes), this property lists the minimum
5480          supportable frame duration for that size.
5481          </description>
5482          <units>Nanoseconds</units>
5483          <details>
5484          Should correspond to the frame duration when only the raw stream is
5485          active.
5486
5487          When multiple streams are configured, the minimum
5488          frame duration will be &amp;gt;= max(individual stream min
5489          durations)</details>
5490          <tag id="BC" />
5491        </entry>
5492        <entry name="availableRawSizes" type="int32" deprecated="true"
5493        container="array" typedef="size">
5494          <array>
5495            <size>n</size>
5496            <size>2</size>
5497          </array>
5498          <description>The resolutions available for use with raw
5499          sensor output streams, listed as width,
5500          height</description>
5501        </entry>
5502      </static>
5503      <dynamic>
5504        <clone entry="android.scaler.cropRegion" kind="controls">
5505        </clone>
5506      </dynamic>
5507      <static>
5508        <entry name="availableInputOutputFormatsMap" type="int32" visibility="hidden"
5509          typedef="reprocessFormatsMap">
5510          <description>The mapping of image formats that are supported by this
5511          camera device for input streams, to their corresponding output formats.
5512          </description>
5513          <details>
          All camera devices for which android.request.maxNumInputStreams is at
          least 1 will have at least one available input format.
5517
5518          The camera device will support the following map of formats,
5519          if its dependent capability (android.request.availableCapabilities) is supported:
5520
5521            Input Format                                    | Output Format                                     | Capability
5522          :-------------------------------------------------|:--------------------------------------------------|:----------
5523          {@link android.graphics.ImageFormat#PRIVATE}      | {@link android.graphics.ImageFormat#JPEG}         | PRIVATE_REPROCESSING
5524          {@link android.graphics.ImageFormat#PRIVATE}      | {@link android.graphics.ImageFormat#YUV_420_888}  | PRIVATE_REPROCESSING
5525          {@link android.graphics.ImageFormat#YUV_420_888}  | {@link android.graphics.ImageFormat#JPEG}         | YUV_REPROCESSING
5526          {@link android.graphics.ImageFormat#YUV_420_888}  | {@link android.graphics.ImageFormat#YUV_420_888}  | YUV_REPROCESSING
5527
5528          PRIVATE refers to a device-internal format that is not directly application-visible.  A
5529          PRIVATE input surface can be acquired by {@link android.media.ImageReader#newInstance}
5530          with {@link android.graphics.ImageFormat#PRIVATE} as the format.
5531
5532          For a PRIVATE_REPROCESSING-capable camera device, using the PRIVATE format as either input
5533          or output will never hurt maximum frame rate (i.e.  {@link
5534          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration
          getOutputStallDuration(ImageFormat.PRIVATE, size)} is always 0).
5536
5537          Attempting to configure an input stream with output streams not
5538          listed as available in this map is not valid.
5539          </details>
5540          <hal_details>
5541          For the formats, see `system/core/include/system/graphics.h` for a definition
5542          of the image format enumerations. The PRIVATE format refers to the
5543          HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED format. The HAL could determine
5544          the actual format by using the gralloc usage flags.
          For the ZSL use case in particular, the HAL could choose an appropriate format (partially
5546          processed YUV or RAW based format) by checking the format and GRALLOC_USAGE_HW_CAMERA_ZSL.
5547          See camera3.h for more details.
5548
5549          This value is encoded as a variable-size array-of-arrays.
5550          The inner array always contains `[format, length, ...]` where
5551          `...` has `length` elements. An inner array is followed by another
5552          inner array if the total metadata entry size hasn't yet been exceeded.
5553
5554          A code sample to read/write this encoding (with a device that
5555          supports reprocessing IMPLEMENTATION_DEFINED to YUV_420_888, and JPEG,
5556          and reprocessing YUV_420_888 to YUV_420_888 and JPEG):
5557
5558              // reading
5559              int32_t* contents = &amp;entry.i32[0];
5560              for (size_t i = 0; i &lt; entry.count; ) {
5561                  int32_t format = contents[i++];
5562                  int32_t length = contents[i++];
5563                  int32_t output_formats[length];
5564                  memcpy(&amp;output_formats[0], &amp;contents[i],
5565                         length * sizeof(int32_t));
5566                  i += length;
5567              }
5568
5569              // writing (static example, PRIVATE_REPROCESSING + YUV_REPROCESSING)
              int32_t contents[] = {
5571                IMPLEMENTATION_DEFINED, 2, YUV_420_888, BLOB,
5572                YUV_420_888, 2, YUV_420_888, BLOB,
5573              };
5574              update_camera_metadata_entry(metadata, index, &amp;contents[0],
5575                    sizeof(contents)/sizeof(contents[0]), &amp;updated_entry);
5576
5577          If the HAL claims to support any of the capabilities listed in the
5578          above details, then it must also support all the input-output
5579          combinations listed for that capability. It can optionally support
5580          additional formats if it so chooses.
5581          </hal_details>
5582          <tag id="REPROC" />
5583        </entry>
5584        <entry name="availableStreamConfigurations" type="int32" visibility="ndk_public"
5585               enum="true" container="array" typedef="streamConfiguration" hwlevel="legacy">
5586          <array>
5587            <size>n</size>
5588            <size>4</size>
5589          </array>
5590          <enum>
5591            <value>OUTPUT</value>
5592            <value>INPUT</value>
5593          </enum>
5594          <description>The available stream configurations that this
5595          camera device supports
5596          (i.e. format, width, height, output/input stream).
5597          </description>
5598          <details>
5599          The configurations are listed as `(format, width, height, input?)`
5600          tuples.
5601
5602          For a given use case, the actual maximum supported resolution
5603          may be lower than what is listed here, depending on the destination
5604          Surface for the image data. For example, for recording video,
5605          the video encoder chosen may have a maximum size limit (e.g. 1080p)
5606          smaller than what the camera (e.g. maximum resolution is 3264x2448)
5607          can provide.
5608
5609          Please reference the documentation for the image data destination to
5610          check if it limits the maximum size for image data.
5611
5612          Not all output formats may be supported in a configuration with
5613          an input stream of a particular format. For more details, see
5614          android.scaler.availableInputOutputFormatsMap.
5615
5616          The following table describes the minimum required output stream
5617          configurations based on the hardware level
5618          (android.info.supportedHardwareLevel):
5619
5620          Format         | Size                                         | Hardware Level | Notes
5621          :-------------:|:--------------------------------------------:|:--------------:|:--------------:
5622          JPEG           | android.sensor.info.activeArraySize          | Any            |
5623          JPEG           | 1920x1080 (1080p)                            | Any            | if 1080p &lt;= activeArraySize
          JPEG           | 1280x720 (720p)                              | Any            | if 720p &lt;= activeArraySize
5625          JPEG           | 640x480 (480p)                               | Any            | if 480p &lt;= activeArraySize
5626          JPEG           | 320x240 (240p)                               | Any            | if 240p &lt;= activeArraySize
5627          YUV_420_888    | all output sizes available for JPEG          | FULL           |
5628          YUV_420_888    | all output sizes available for JPEG, up to the maximum video size | LIMITED        |
5629          IMPLEMENTATION_DEFINED | same as YUV_420_888                  | Any            |
5630
5631          Refer to android.request.availableCapabilities for additional
5632          mandatory stream configurations on a per-capability basis.
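
          For illustration, a short C sketch (hypothetical helper; it relies only on the
          `(format, width, height, input?)` tuple layout described above and assumes OUTPUT is
          encoded as 0, the first enum value) that finds the largest output size listed for a
          given format:

              // configs points at n*4 int32 values laid out as format, width, height, isInput,
              // where isInput is 0 for an OUTPUT configuration.
              static void largest_output_size(const int32_t* configs, size_t n, int32_t format,
                                              int32_t* outW, int32_t* outH) {
                  *outW = 0;
                  *outH = 0;
                  for (size_t i = 0; i &lt; n; i++) {
                      const int32_t* c = &amp;configs[i * 4];
                      if (c[0] != format || c[3] != 0) continue;  // different format, or an input
                      if ((int64_t) c[1] * c[2] > (int64_t) (*outW) * (*outH)) {
                          *outW = c[1];
                          *outH = c[2];
                      }
                  }
              }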
5633          </details>
5634          <hal_details>
5635          It is recommended (but not mandatory) to also include half/quarter
5636          of sensor maximum resolution for JPEG formats (regardless of hardware
5637          level).
5638
5639          (The following is a rewording of the above required table):
5640
          For JPEG format, the sizes may be restricted by the conditions below:
5642
          * The HAL may choose the aspect ratio of each JPEG size to be one of the well-known ones
5644          (e.g. 4:3, 16:9, 3:2 etc.). If the sensor maximum resolution
5645          (defined by android.sensor.info.activeArraySize) has an aspect ratio other than these,
5646          it does not have to be included in the supported JPEG sizes.
5647          * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as
5648          the dimensions being a multiple of 16.
5649
          Therefore, the maximum JPEG size may be smaller than the sensor maximum resolution.
          However, the largest JPEG size must be as close as possible to the sensor maximum
          resolution given the above constraints. It is required that after aspect ratio
          adjustments, the additional size reduction due to other issues is less than 3% in area.
          For example, if the sensor maximum resolution is 3280x2464, the maximum JPEG size has
          aspect ratio 4:3, and the JPEG encoder alignment requirement is 16, then the maximum
          JPEG size will be 3264x2448.
5657
5658          For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
5659          the HAL must include all YUV_420_888 sizes that have JPEG sizes listed
5660          here as output streams.
5661
5662          It must also include each below resolution if it is smaller than or
5663          equal to the sensor maximum resolution (for both YUV_420_888 and JPEG
5664          formats), as output streams:
5665
5666          * 240p (320 x 240)
5667          * 480p (640 x 480)
5668          * 720p (1280 x 720)
5669          * 1080p (1920 x 1080)
5670
5671          For LIMITED capability devices
5672          (`android.info.supportedHardwareLevel == LIMITED`),
5673          the HAL only has to list up to the maximum video size
5674          supported by the device.
5675
5676          Regardless of hardware level, every output resolution available for
5677          YUV_420_888 must also be available for IMPLEMENTATION_DEFINED.
5678
          This supersedes the following fields, which are now deprecated:
5680
5681          * availableFormats
5682          * available[Processed,Raw,Jpeg]Sizes
5683          </hal_details>
5684        </entry>
5685        <entry name="availableMinFrameDurations" type="int64" visibility="ndk_public"
5686               container="array" typedef="streamConfigurationDuration" hwlevel="legacy">
5687          <array>
5688            <size>4</size>
5689            <size>n</size>
5690          </array>
5691          <description>This lists the minimum frame duration for each
5692          format/size combination.
5693          </description>
5694          <units>(format, width, height, ns) x n</units>
5695          <details>
5696          This should correspond to the frame duration when only that
5697          stream is active, with all processing (typically in android.*.mode)
5698          set to either OFF or FAST.
5699
5700          When multiple streams are used in a request, the minimum frame
5701          duration will be max(individual stream min durations).
5702
5703          The minimum frame duration of a stream (of a particular format, size)
5704          is the same regardless of whether the stream is input or output.
5705
5706          See android.sensor.frameDuration and
5707          android.scaler.availableStallDurations for more details about
5708          calculating the max frame rate.
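
          A minimal sketch of that rule in C (hypothetical helper; `durations[i]` is assumed to
          be the value looked up in this list for stream `i` of the request):

              // Returns the minimum achievable frame duration (ns) for a request whose
              // streams have the given per-stream minimum durations.
              static int64_t request_min_frame_duration(const int64_t* durations, size_t count) {
                  int64_t min_duration = 0;
                  for (size_t i = 0; i &lt; count; i++) {
                      if (durations[i] > min_duration) min_duration = durations[i];
                  }
                  return min_duration;
              }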
5709          </details>
5710          <tag id="V1" />
5711        </entry>
5712        <entry name="availableStallDurations" type="int64" visibility="ndk_public"
5713               container="array" typedef="streamConfigurationDuration" hwlevel="legacy">
5714          <array>
5715            <size>4</size>
5716            <size>n</size>
5717          </array>
5718          <description>This lists the maximum stall duration for each
5719          output format/size combination.
5720          </description>
5721          <units>(format, width, height, ns) x n</units>
5722          <details>
5723          A stall duration is how much extra time would get added
5724          to the normal minimum frame duration for a repeating request
5725          that has streams with non-zero stall.
5726
5727          For example, consider JPEG captures which have the following
5728          characteristics:
5729
5730          * JPEG streams act like processed YUV streams in requests for which
5731          they are not included; in requests in which they are directly
5732          referenced, they act as JPEG streams. This is because supporting a
5733          JPEG stream requires the underlying YUV data to always be ready for
5734          use by a JPEG encoder, but the encoder will only be used (and impact
5735          frame duration) on requests that actually reference a JPEG stream.
5736          * The JPEG processor can run concurrently to the rest of the camera
5737          pipeline, but cannot process more than 1 capture at a time.
5738
5739          In other words, using a repeating YUV request would result
5740          in a steady frame rate (let's say it's 30 FPS). If a single
5741          JPEG request is submitted periodically, the frame rate will stay
5742          at 30 FPS (as long as we wait for the previous JPEG to return each
5743          time). If we try to submit a repeating YUV + JPEG request, then
5744          the frame rate will drop from 30 FPS.
5745
5746          In general, submitting a new request with a non-0 stall time
5747          stream will _not_ cause a frame rate drop unless there are still
5748          outstanding buffers for that stream from previous requests.
5749
          Submitting a repeating request that uses a set of streams (call this set `S`)
          effectively sets the minimum frame duration to the normal minimum frame
          duration corresponding to `S` plus the maximum stall duration for `S`.
5754
5755          If interleaving requests with and without a stall duration,
5756          a request will stall by the maximum of the remaining times
5757          for each can-stall stream with outstanding buffers.
5758
5759          This means that a stalling request will not have an exposure start
5760          until the stall has completed.
5761
5762          This should correspond to the stall duration when only that stream is
5763          active, with all processing (typically in android.*.mode) set to FAST
5764          or OFF. Setting any of the processing modes to HIGH_QUALITY
5765          effectively results in an indeterminate stall duration for all
5766          streams in a request (the regular stall calculation rules are
5767          ignored).
5768
5769          The following formats may always have a stall duration:
5770
5771          * {@link AIMAGE_FORMAT_JPEG}
5772          * {@link AIMAGE_FORMAT_RAW16}
5773
5774          The following formats will never have a stall duration:
5775
5776          * {@link AIMAGE_FORMAT_YUV_420_888}
5777          * {@link AIMAGE_FORMAT_RAW10}
5778
5779          All other formats may or may not have an allowed stall duration on
5780          a per-capability basis; refer to android.request.availableCapabilities
5781          for more details.
5782
5783          See android.sensor.frameDuration for more information about
5784          calculating the max frame rate (absent stalls).
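
          As a rough arithmetic sketch (hypothetical names; the two inputs come from this list and
          android.scaler.availableMinFrameDurations), the steady-state frame duration of a
          repeating request that includes a stalling stream is simply:

              // E.g. a 33,333,333 ns (30 FPS) minimum frame duration plus a 33 ms JPEG stall
              // gives roughly 66 ms per frame, i.e. about 15 FPS.
              static int64_t repeating_frame_duration(int64_t min_frame_duration_ns,
                                                      int64_t max_stall_ns) {
                  return min_frame_duration_ns + max_stall_ns;
              }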
5785          </details>
5786          <hal_details>
5787          If possible, it is recommended that all non-JPEG formats
5788          (such as RAW16) should not have a stall duration. RAW10, RAW12, RAW_OPAQUE
5789          and IMPLEMENTATION_DEFINED must not have stall durations.
5790          </hal_details>
5791          <tag id="V1" />
5792        </entry>
5793        <entry name="streamConfigurationMap" type="int32" visibility="java_public"
5794               synthetic="true" typedef="streamConfigurationMap"
5795               hwlevel="legacy">
5796          <description>The available stream configurations that this
5797          camera device supports; also includes the minimum frame durations
5798          and the stall durations for each format/size combination.
5799          </description>
5800          <details>
5801          All camera devices will support sensor maximum resolution (defined by
5802          android.sensor.info.activeArraySize) for the JPEG format.
5803
5804          For a given use case, the actual maximum supported resolution
5805          may be lower than what is listed here, depending on the destination
5806          Surface for the image data. For example, for recording video,
5807          the video encoder chosen may have a maximum size limit (e.g. 1080p)
5808          smaller than what the camera (e.g. maximum resolution is 3264x2448)
5809          can provide.
5810
5811          Please reference the documentation for the image data destination to
5812          check if it limits the maximum size for image data.
5813
5814          The following table describes the minimum required output stream
5815          configurations based on the hardware level
5816          (android.info.supportedHardwareLevel):
5817
5818          Format                                             | Size                                         | Hardware Level | Notes
5819          :-------------------------------------------------:|:--------------------------------------------:|:--------------:|:--------------:
5820          {@link android.graphics.ImageFormat#JPEG}          | android.sensor.info.activeArraySize (*1)     | Any            |
5821          {@link android.graphics.ImageFormat#JPEG}          | 1920x1080 (1080p)                            | Any            | if 1080p &lt;= activeArraySize
5822          {@link android.graphics.ImageFormat#JPEG}          | 1280x720 (720p)                               | Any            | if 720p &lt;= activeArraySize
5823          {@link android.graphics.ImageFormat#JPEG}          | 640x480 (480p)                               | Any            | if 480p &lt;= activeArraySize
5824          {@link android.graphics.ImageFormat#JPEG}          | 320x240 (240p)                               | Any            | if 240p &lt;= activeArraySize
5825          {@link android.graphics.ImageFormat#YUV_420_888}   | all output sizes available for JPEG          | FULL           |
5826          {@link android.graphics.ImageFormat#YUV_420_888}   | all output sizes available for JPEG, up to the maximum video size | LIMITED        |
5827          {@link android.graphics.ImageFormat#PRIVATE}       | same as YUV_420_888                          | Any            |
5828
5829          Refer to android.request.availableCapabilities and {@link
5830          android.hardware.camera2.CameraDevice#createCaptureSession} for additional mandatory
5831          stream configurations on a per-capability basis.
5832
          *1: For JPEG format, the sizes may be restricted by the conditions below:
5834
          * The HAL may choose the aspect ratio of each JPEG size to be one of the well-known ones
5836          (e.g. 4:3, 16:9, 3:2 etc.). If the sensor maximum resolution
5837          (defined by android.sensor.info.activeArraySize) has an aspect ratio other than these,
5838          it does not have to be included in the supported JPEG sizes.
5839          * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as
5840          the dimensions being a multiple of 16.
          Therefore, the maximum JPEG size may be smaller than the sensor maximum resolution.
          However, the largest JPEG size will be as close as possible to the sensor maximum
          resolution given the above constraints. It is required that after aspect ratio
          adjustments, the additional size reduction due to other issues is less than 3% in area.
          For example, if the sensor maximum resolution is 3280x2464, the maximum JPEG size has
          aspect ratio 4:3, and the JPEG encoder alignment requirement is 16, then the maximum
          JPEG size will be 3264x2448.
5848          </details>
5849          <hal_details>
5850          Do not set this property directly
5851          (it is synthetic and will not be available at the HAL layer);
5852          set the android.scaler.availableStreamConfigurations instead.
5853
5854          Not all output formats may be supported in a configuration with
5855          an input stream of a particular format. For more details, see
5856          android.scaler.availableInputOutputFormatsMap.
5857
5858          It is recommended (but not mandatory) to also include half/quarter
5859          of sensor maximum resolution for JPEG formats (regardless of hardware
5860          level).
5861
5862          (The following is a rewording of the above required table):
5863
5864          The HAL must include sensor maximum resolution (defined by
5865          android.sensor.info.activeArraySize).
5866
5867          For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
5868          the HAL must include all YUV_420_888 sizes that have JPEG sizes listed
5869          here as output streams.
5870
5871          It must also include each below resolution if it is smaller than or
5872          equal to the sensor maximum resolution (for both YUV_420_888 and JPEG
5873          formats), as output streams:
5874
5875          * 240p (320 x 240)
5876          * 480p (640 x 480)
5877          * 720p (1280 x 720)
5878          * 1080p (1920 x 1080)
5879
5880          For LIMITED capability devices
5881          (`android.info.supportedHardwareLevel == LIMITED`),
5882          the HAL only has to list up to the maximum video size
5883          supported by the device.
5884
5885          Regardless of hardware level, every output resolution available for
5886          YUV_420_888 must also be available for IMPLEMENTATION_DEFINED.
5887
          This supersedes the following fields, which are now deprecated:
5889
5890          * availableFormats
5891          * available[Processed,Raw,Jpeg]Sizes
5892          </hal_details>
5893        </entry>
5894        <entry name="croppingType" type="byte" visibility="public" enum="true"
5895               hwlevel="legacy">
5896          <enum>
5897            <value>CENTER_ONLY
5898              <notes>
5899                The camera device only supports centered crop regions.
5900              </notes>
5901            </value>
5902            <value>FREEFORM
5903              <notes>
5904                The camera device supports arbitrarily chosen crop regions.
5905              </notes>
5906            </value>
5907          </enum>
5908          <description>The crop type that this camera device supports.</description>
5909          <details>
5910          When passing a non-centered crop region (android.scaler.cropRegion) to a camera
5911          device that only supports CENTER_ONLY cropping, the camera device will move the
5912          crop region to the center of the sensor active array (android.sensor.info.activeArraySize)
5913          and keep the crop region width and height unchanged. The camera device will return the
5914          final used crop region in metadata result android.scaler.cropRegion.
5915
5916          Camera devices that support FREEFORM cropping will support any crop region that
5917          is inside of the active array. The camera device will apply the same crop region and
5918          return the final used crop region in capture result metadata android.scaler.cropRegion.
5919
5920          LEGACY capability devices will only support CENTER_ONLY cropping.
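
          For illustration, a minimal C sketch (hypothetical names) of how a CENTER_ONLY device
          recenters a requested crop region while keeping its size:

              typedef struct { int32_t left, top, width, height; } rect_t;

              // Recenter the requested crop within the active array, keeping width and height.
              static rect_t center_crop(rect_t requested, rect_t activeArray) {
                  rect_t out = requested;
                  out.left = (activeArray.width  - requested.width)  / 2;
                  out.top  = (activeArray.height - requested.height) / 2;
                  return out;  // this is what gets reported back in android.scaler.cropRegion
              }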
5921          </details>
5922        </entry>
5923      </static>
5924    </section>
5925    <section name="sensor">
5926      <controls>
5927        <entry name="exposureTime" type="int64" visibility="public" hwlevel="full">
5928          <description>Duration each pixel is exposed to
5929          light.</description>
5930          <units>Nanoseconds</units>
5931          <range>android.sensor.info.exposureTimeRange</range>
5932          <details>If the sensor can't expose this exact duration, it will shorten the
5933          duration exposed to the nearest possible value (rather than expose longer).
5934          The final exposure time used will be available in the output capture result.
5935
5936          This control is only effective if android.control.aeMode or android.control.mode is set to
5937          OFF; otherwise the auto-exposure algorithm will override this value.
5938          </details>
5939          <tag id="V1" />
5940        </entry>
5941        <entry name="frameDuration" type="int64" visibility="public" hwlevel="full">
5942          <description>Duration from start of frame exposure to
5943          start of next frame exposure.</description>
5944          <units>Nanoseconds</units>
5945          <range>See android.sensor.info.maxFrameDuration,
5946          ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS. The duration
5947          is capped to `max(duration, exposureTime + overhead)`.</range>
5948          <details>
5949          The maximum frame rate that can be supported by a camera subsystem is
5950          a function of many factors:
5951
5952          * Requested resolutions of output image streams
5953          * Availability of binning / skipping modes on the imager
5954          * The bandwidth of the imager interface
5955          * The bandwidth of the various ISP processing blocks
5956
5957          Since these factors can vary greatly between different ISPs and
5958          sensors, the camera abstraction tries to represent the bandwidth
5959          restrictions with as simple a model as possible.
5960
5961          The model presented has the following characteristics:
5962
5963          * The image sensor is always configured to output the smallest
5964          resolution possible given the application's requested output stream
5965          sizes.  The smallest resolution is defined as being at least as large
5966          as the largest requested output stream size; the camera pipeline must
5967          never digitally upsample sensor data when the crop region covers the
5968          whole sensor. In general, this means that if only small output stream
5969          resolutions are configured, the sensor can provide a higher frame
5970          rate.
5971          * Since any request may use any or all the currently configured
5972          output streams, the sensor and ISP must be configured to support
5973          scaling a single capture to all the streams at the same time.  This
5974          means the camera pipeline must be ready to produce the largest
5975          requested output size without any delay.  Therefore, the overall
5976          frame rate of a given configured stream set is governed only by the
5977          largest requested stream resolution.
5978          * Using more than one output stream in a request does not affect the
5979          frame duration.
5980          * Certain format-streams may need to do additional background processing
5981          before data is consumed/produced by that stream. These processors
5982          can run concurrently to the rest of the camera pipeline, but
5983          cannot process more than 1 capture at a time.
5984
5985          The necessary information for the application, given the model above,
5986          is provided via
5987          {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}.
5988          These are used to determine the maximum frame rate / minimum frame
5989          duration that is possible for a given stream configuration.
5990
5991          Specifically, the application can use the following rules to
5992          determine the minimum frame duration it can request from the camera
5993          device:
5994
5995          1. Let the set of currently configured input/output streams
5996          be called `S`.
5997          1. Find the minimum frame durations for each stream in `S`, by looking
5998          it up in {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}
5999          (with its respective size/format). Let this set of frame durations be
6000          called `F`.
6001          1. For any given request `R`, the minimum frame duration allowed
6002          for `R` is the maximum out of all values in `F`. Let the streams
6003          used in `R` be called `S_r`.
6004
6005          If none of the streams in `S_r` have a stall time (listed in {@link
6006          ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}
6007          using its respective size/format), then the frame duration in `F`
6008          determines the steady state frame rate that the application will get
6009          if it uses `R` as a repeating request. Let this special kind of
6010          request be called `Rsimple`.
6011
6012          A repeating request `Rsimple` can be _occasionally_ interleaved
6013          by a single capture of a new request `Rstall` (which has at least
6014          one in-use stream with a non-0 stall time) and if `Rstall` has the
6015          same minimum frame duration this will not cause a frame rate loss
6016          if all buffers from the previous `Rstall` have already been
6017          delivered.
6018
6019          For more details about stalling, see
6020          {@link ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}.
6021
6022          This control is only effective if android.control.aeMode or android.control.mode is set to
6023          OFF; otherwise the auto-exposure algorithm will override this value.
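
          As a rough illustration (hypothetical names; `overhead_ns` stands in for the
          unspecified per-frame overhead mentioned in the range documentation above), the
          effective frame duration and resulting frame rate for a request:

              // The frame duration is capped to max(duration, exposureTime + overhead).
              static int64_t effective_frame_duration(int64_t requested_ns, int64_t exposure_ns,
                                                      int64_t overhead_ns) {
                  int64_t floor_ns = exposure_ns + overhead_ns;
                  return requested_ns > floor_ns ? requested_ns : floor_ns;
              }

              // Frames per second at a given frame duration, e.g. 33,333,333 ns -> ~30 FPS.
              static double frames_per_second(int64_t frame_duration_ns) {
                  return 1e9 / (double) frame_duration_ns;
              }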
6024          </details>
6025          <hal_details>
6026          For more details about stalling, see
6027          android.scaler.availableStallDurations.
6028          </hal_details>
6029          <tag id="V1" />
6030        </entry>
6031        <entry name="sensitivity" type="int32" visibility="public" hwlevel="full">
6032          <description>The amount of gain applied to sensor data
6033          before processing.</description>
6034          <units>ISO arithmetic units</units>
6035          <range>android.sensor.info.sensitivityRange</range>
6036          <details>
6037          The sensitivity is the standard ISO sensitivity value,
6038          as defined in ISO 12232:2006.
6039
6040          The sensitivity must be within android.sensor.info.sensitivityRange, and
          if it is less than android.sensor.maxAnalogSensitivity, the camera device
6042          is guaranteed to use only analog amplification for applying the gain.
6043
6044          If the camera device cannot apply the exact sensitivity
6045          requested, it will reduce the gain to the nearest supported
6046          value. The final sensitivity used will be available in the
6047          output capture result.
6048
6049          This control is only effective if android.control.aeMode or android.control.mode is set to
6050          OFF; otherwise the auto-exposure algorithm will override this value.
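
          As a minimal sketch (hypothetical names; one way to reason about the analog gain
          guarantee above, not a mandated implementation), splitting a requested sensitivity
          into an analog portion and a residual digital gain:

              static void split_sensitivity(int32_t requested, int32_t maxAnalogSensitivity,
                                            int32_t* analog_iso, double* digital_gain) {
                  // Up to maxAnalogSensitivity, the gain is guaranteed to be purely analog.
                  *analog_iso = requested &lt; maxAnalogSensitivity ? requested : maxAnalogSensitivity;
                  // Anything beyond that can be thought of as additional digital gain.
                  *digital_gain = (double) requested / (double) *analog_iso;
              }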
6051          </details>
6052          <hal_details>ISO 12232:2006 REI method is acceptable.</hal_details>
6053          <tag id="V1" />
6054        </entry>
6055      </controls>
6056      <static>
6057        <namespace name="info">
6058          <entry name="activeArraySize" type="int32" visibility="public"
6059          type_notes="Four ints defining the active pixel rectangle"
6060          container="array" typedef="rectangle" hwlevel="legacy">
6061            <array>
6062              <size>4</size>
6063            </array>
6064            <description>
6065            The area of the image sensor which corresponds to active pixels after any geometric
6066            distortion correction has been applied.
6067            </description>
6068            <units>Pixel coordinates on the image sensor</units>
6069            <details>
6070            This is the rectangle representing the size of the active region of the sensor (i.e.
6071            the region that actually receives light from the scene) after any geometric correction
6072            has been applied, and should be treated as the maximum size in pixels of any of the
6073            image output formats aside from the raw formats.
6074
6075            This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
6076            the full pixel array, and the size of the full pixel array is given by
6077            android.sensor.info.pixelArraySize.
6078
6079            The data representation is int[4], which maps to (left, top, width, height).
6080
6081            The coordinate system for most other keys that list pixel coordinates, including
6082            android.scaler.cropRegion, is defined relative to the active array rectangle given in
6083            this field, with `(0, 0)` being the top-left of this rectangle.
6084
6085            The active array may be smaller than the full pixel array, since the full array may
6086            include black calibration pixels or other inactive regions, and geometric correction
6087            resulting in scaling or cropping may have been applied.
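
            For example, a minimal C sketch (hypothetical names) of converting a point from the
            active-array coordinate system used by keys such as android.scaler.cropRegion into
            full pixel array coordinates:

                typedef struct { int32_t x, y; } point_t;
                typedef struct { int32_t left, top, width, height; } rect_t;

                // p is relative to the active array's top-left corner; activeArray is this key.
                static point_t active_to_pixel_array(point_t p, rect_t activeArray) {
                    point_t out = { p.x + activeArray.left, p.y + activeArray.top };
                    return out;
                }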
6088            </details>
6089            <hal_details>
6090            This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be
6091            &amp;gt;= `(0,0)`.
6092            The `(width, height)` must be &amp;lt;= `android.sensor.info.pixelArraySize`.
6093            </hal_details>
6094            <tag id="RAW" />
6095          </entry>
6096          <entry name="sensitivityRange" type="int32" visibility="public"
6097          type_notes="Range of supported sensitivities"
6098          container="array" typedef="rangeInt"
6099          hwlevel="full">
6100            <array>
6101              <size>2</size>
6102            </array>
6103            <description>Range of sensitivities for android.sensor.sensitivity supported by this
6104            camera device.</description>
6105            <range>Min &lt;= 100, Max &amp;gt;= 800</range>
6106            <details>
6107              The values are the standard ISO sensitivity values,
6108              as defined in ISO 12232:2006.
6109            </details>
6110
6111            <tag id="BC" />
6112            <tag id="V1" />
6113          </entry>
6114          <entry name="colorFilterArrangement" type="byte" visibility="public" enum="true"
6115            hwlevel="full">
6116            <enum>
6117              <value>RGGB</value>
6118              <value>GRBG</value>
6119              <value>GBRG</value>
6120              <value>BGGR</value>
6121              <value>RGB
6122              <notes>Sensor is not Bayer; output has 3 16-bit
6123              values for each pixel, instead of just 1 16-bit value
6124              per pixel.</notes></value>
6125            </enum>
6126            <description>The arrangement of color filters on sensor;
6127            represents the colors in the top-left 2x2 section of
6128            the sensor, in reading order.</description>
6129            <tag id="RAW" />
6130          </entry>
6131          <entry name="exposureTimeRange" type="int64" visibility="public"
6132                 type_notes="nanoseconds" container="array" typedef="rangeLong"
6133                 hwlevel="full">
6134            <array>
6135              <size>2</size>
6136            </array>
6137            <description>The range of image exposure times for android.sensor.exposureTime supported
6138            by this camera device.
6139            </description>
6140            <units>Nanoseconds</units>
6141            <range>The minimum exposure time will be less than 100 us. For FULL
6142            capability devices (android.info.supportedHardwareLevel == FULL),
6143            the maximum exposure time will be greater than 100ms.</range>
            <hal_details>For FULL capability devices (android.info.supportedHardwareLevel == FULL),
            the maximum of the range SHOULD be at least 1 second (1e9) and MUST be at least
            100ms (100e6).
6147            </hal_details>
6148            <tag id="V1" />
6149          </entry>
6150          <entry name="maxFrameDuration" type="int64" visibility="public"
6151                 hwlevel="full">
6152            <description>The maximum possible frame duration (minimum frame rate) for
            android.sensor.frameDuration that is supported by this camera device.</description>
6154            <units>Nanoseconds</units>
6155            <range>For FULL capability devices
6156            (android.info.supportedHardwareLevel == FULL), at least 100ms.
6157            </range>
6158            <details>Attempting to use frame durations beyond the maximum will result in the frame
6159            duration being clipped to the maximum. See that control for a full definition of frame
6160            durations.
6161
6162            Refer to {@link
6163            ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}
6164            for the minimum frame duration values.
6165            </details>
6166            <hal_details>
            For FULL capability devices (android.info.supportedHardwareLevel == FULL),
            the maximum of the range SHOULD be at least
            1 second (1e9) and MUST be at least 100ms (100e6).
6170
6171            android.sensor.info.maxFrameDuration must be greater or
6172            equal to the android.sensor.info.exposureTimeRange max
6173            value (since exposure time overrides frame duration).
6174
6175            Available minimum frame durations for JPEG must be no greater
6176            than that of the YUV_420_888/IMPLEMENTATION_DEFINED
6177            minimum frame durations (for that respective size).
6178
6179            Since JPEG processing is considered offline and can take longer than
6180            a single uncompressed capture, refer to
6181            android.scaler.availableStallDurations
6182            for details about encoding this scenario.
6183            </hal_details>
6184            <tag id="V1" />
6185          </entry>
6186          <entry name="physicalSize" type="float" visibility="public"
6187          type_notes="width x height"
6188          container="array" typedef="sizeF" hwlevel="legacy">
6189            <array>
6190              <size>2</size>
6191            </array>
6192            <description>The physical dimensions of the full pixel
6193            array.</description>
6194            <units>Millimeters</units>
6195            <details>This is the physical size of the sensor pixel
6196            array defined by android.sensor.info.pixelArraySize.
6197            </details>
6198            <hal_details>Needed for FOV calculation for old API</hal_details>
6199            <tag id="V1" />
6200            <tag id="BC" />
6201          </entry>
6202          <entry name="pixelArraySize" type="int32" visibility="public"
6203          container="array" typedef="size" hwlevel="legacy">
6204            <array>
6205              <size>2</size>
6206            </array>
6207            <description>Dimensions of the full pixel array, possibly
6208            including black calibration pixels.</description>
6209            <units>Pixels</units>
6210            <details>The pixel count of the full pixel array of the image sensor, which covers
6211            android.sensor.info.physicalSize area.  This represents the full pixel dimensions of
6212            the raw buffers produced by this sensor.
6213
6214            If a camera device supports raw sensor formats, either this or
6215            android.sensor.info.preCorrectionActiveArraySize is the maximum dimensions for the raw
6216            output formats listed in ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS (this depends on
6217            whether or not the image sensor returns buffers containing pixels that are not
            part of the active array region for black level calibration or other purposes).
6219
6220            Some parts of the full pixel array may not receive light from the scene,
6221            or be otherwise inactive.  The android.sensor.info.preCorrectionActiveArraySize key
6222            defines the rectangle of active pixels that will be included in processed image
6223            formats.
6224            </details>
6225            <tag id="RAW" />
6226            <tag id="BC" />
6227          </entry>
6228          <entry name="whiteLevel" type="int32" visibility="public">
6229            <description>
6230            Maximum raw value output by sensor.
6231            </description>
6232            <range>&amp;gt; 255 (8-bit output)</range>
6233            <details>
6234            This specifies the fully-saturated encoding level for the raw
6235            sample values from the sensor.  This is typically caused by the
6236            sensor becoming highly non-linear or clipping. The minimum for
6237            each channel is specified by the offset in the
6238            android.sensor.blackLevelPattern key.
6239
6240            The white level is typically determined either by sensor bit depth
6241            (8-14 bits is expected), or by the point where the sensor response
            becomes too non-linear to be useful.  The default value for this is
            the maximum representable value for a 16-bit raw sample (2^16 - 1).
6244
6245            The white level values of captured images may vary for different
6246            capture settings (e.g., android.sensor.sensitivity). This key
            represents a coarse approximation for such a case. It is recommended
6248            to use android.sensor.dynamicWhiteLevel for captures when supported
6249            by the camera device, which provides more accurate white level values.
6250            </details>
6251            <hal_details>
6252            The full bit depth of the sensor must be available in the raw data,
6253            so the value for linear sensors should not be significantly lower
            than the maximum raw value supported, i.e. 2^(sensor bits per pixel).
6255            </hal_details>
6256            <tag id="RAW" />
6257          </entry>
6258          <entry name="timestampSource" type="byte" visibility="public"
6259                 enum="true" hwlevel="legacy">
6260            <enum>
6261              <value>UNKNOWN
6262                <notes>
6263                Timestamps from android.sensor.timestamp are in nanoseconds and monotonic,
6264                but can not be compared to timestamps from other subsystems
6265                (e.g. accelerometer, gyro etc.), or other instances of the same or different
6266                camera devices in the same system. Timestamps between streams and results for
6267                a single camera instance are comparable, and the timestamps for all buffers
6268                and the result metadata generated by a single capture are identical.
6269                </notes>
6270              </value>
6271              <value>REALTIME
6272                <notes>
6273                Timestamps from android.sensor.timestamp are in the same timebase as
6274                [elapsedRealtimeNanos](https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos)
6275                (or CLOCK_BOOTTIME), and they can be compared to other timestamps using that base.
6276                </notes>
6277              </value>
6278            </enum>
6279            <description>The time base source for sensor capture start timestamps.</description>
6280            <details>
6281            The timestamps provided for captures are always in nanoseconds and monotonic, but
            may not be based on a time source that can be compared to other system time sources.
6283
6284            This characteristic defines the source for the timestamps, and therefore whether they
6285            can be compared against other system time sources/timestamps.
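
            As an illustration only, a minimal sketch of how an application might use this key
            before comparing capture timestamps to the system clock (assuming a
            CameraCharacteristics instance `characteristics` and a TotalCaptureResult
            `result`):

                // Illustrative sketch; names are assumptions, not part of the metadata.
                Integer source =
                    characteristics.get(CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
                long captureTimeNs = result.get(CaptureResult.SENSOR_TIMESTAMP);
                if (source == CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) {
                    // Same timebase as SystemClock.elapsedRealtimeNanos(), so the time since
                    // the start of exposure can be measured directly.
                    long latencyNs = SystemClock.elapsedRealtimeNanos() - captureTimeNs;
                }
                // With UNKNOWN, the timestamp is only comparable to other timestamps from
                // the same camera device.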
6286            </details>
6287          <tag id="V1" />
6288        </entry>
6289        <entry name="lensShadingApplied" type="byte" visibility="public" enum="true"
6290               typedef="boolean">
6291          <enum>
6292            <value>FALSE</value>
6293            <value>TRUE</value>
6294          </enum>
6295          <description>Whether the RAW images output from this camera device are subject to
6296          lens shading correction.</description>
6297          <details>
6298          If TRUE, all images produced by the camera device in the RAW image formats will
6299          have lens shading correction already applied to it. If FALSE, the images will
6300          not be adjusted for lens shading correction.
6301          See android.request.maxNumOutputRaw for a list of RAW image formats.
6302
          This key will be `null` for all devices that do not report this information.
6304          Devices with RAW capability will always report this information in this key.
6305          </details>
6306        </entry>
6307        <entry name="preCorrectionActiveArraySize" type="int32" visibility="public"
6308          type_notes="Four ints defining the active pixel rectangle" container="array"
6309          typedef="rectangle" hwlevel="legacy">
6310            <array>
6311              <size>4</size>
6312            </array>
6313            <description>
6314            The area of the image sensor which corresponds to active pixels prior to the
6315            application of any geometric distortion correction.
6316            </description>
6317            <units>Pixel coordinates on the image sensor</units>
6318            <details>
6319            The data representation is int[4], which maps to (left, top, width, height).
6320
6321            This is the rectangle representing the size of the active region of the sensor (i.e.
6322            the region that actually receives light from the scene) before any geometric correction
6323            has been applied, and should be treated as the active region rectangle for any of the
6324            raw formats.  All metadata associated with raw processing (e.g. the lens shading
6325            correction map, and radial distortion fields) treats the top, left of this rectangle as
6326            the origin, (0,0).
6327
6328            The size of this region determines the maximum field of view and the maximum number of
6329            pixels that an image from this sensor can contain, prior to the application of
6330            geometric distortion correction. The effective maximum pixel dimensions of a
6331            post-distortion-corrected image is given by the android.sensor.info.activeArraySize
6332            field, and the effective maximum field of view for a post-distortion-corrected image
6333            can be calculated by applying the geometric distortion correction fields to this
6334            rectangle, and cropping to the rectangle given in android.sensor.info.activeArraySize.
6335
            E.g. to calculate the position of a pixel, (x,y), in a processed YUV output image with the
            dimensions in android.sensor.info.activeArraySize, given the position of a pixel,
            (x', y'), in the raw pixel array with dimensions given in
            android.sensor.info.pixelArraySize:
6340
6341            1. Choose a pixel (x', y') within the active array region of the raw buffer given in
6342            android.sensor.info.preCorrectionActiveArraySize, otherwise this pixel is considered
6343            to be outside of the FOV, and will not be shown in the processed output image.
6344            1. Apply geometric distortion correction to get the post-distortion pixel coordinate,
6345            (x_i, y_i). When applying geometric correction metadata, note that metadata for raw
6346            buffers is defined relative to the top, left of the
6347            android.sensor.info.preCorrectionActiveArraySize rectangle.
6348            1. If the resulting corrected pixel coordinate is within the region given in
6349            android.sensor.info.activeArraySize, then the position of this pixel in the
6350            processed output image buffer is `(x_i - activeArray.left, y_i - activeArray.top)`,
6351            when the top, left coordinate of that buffer is treated as (0, 0).
6352
6353            Thus, for pixel x',y' = (25, 25) on a sensor where android.sensor.info.pixelArraySize
6354            is (100,100), android.sensor.info.preCorrectionActiveArraySize is (10, 10, 100, 100),
6355            android.sensor.info.activeArraySize is (20, 20, 80, 80), and the geometric distortion
6356            correction doesn't change the pixel coordinate, the resulting pixel selected in
6357            pixel coordinates would be x,y = (25, 25) relative to the top,left of the raw buffer
6358            with dimensions given in android.sensor.info.pixelArraySize, and would be (5, 5)
6359            relative to the top,left of post-processed YUV output buffer with dimensions given in
6360            android.sensor.info.activeArraySize.
6361
6362            The currently supported fields that correct for geometric distortion are:
6363
6364            1. android.lens.radialDistortion.
6365
6366            If all of the geometric distortion fields are no-ops, this rectangle will be the same
6367            as the post-distortion-corrected rectangle given in
6368            android.sensor.info.activeArraySize.
6369
6370            This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
6371            the full pixel array, and the size of the full pixel array is given by
6372            android.sensor.info.pixelArraySize.
6373
6374            The pre-correction active array may be smaller than the full pixel array, since the
6375            full array may include black calibration pixels or other inactive regions.
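
            As an illustration only, a sketch of the worked example above for the case where
            distortion correction is a no-op (assuming a CameraCharacteristics instance
            `characteristics`):

                // Illustrative sketch; names are assumptions, not part of the metadata.
                Rect preCorrection = characteristics.get(
                    CameraCharacteristics.SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
                Rect activeArray = characteristics.get(
                    CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
                int rawX = 25, rawY = 25;      // pixel in full raw pixel array coordinates
                if (preCorrection.contains(rawX, rawY)) {
                    // With identity distortion correction, (x_i, y_i) equals the raw coordinate.
                    int outX = rawX - activeArray.left;   // 25 - 20 = 5 in the example above
                    int outY = rawY - activeArray.top;    // 25 - 20 = 5
                }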
6376            </details>
6377            <hal_details>
6378            This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be
6379            &amp;gt;= `(0,0)`.
6380            The `(width, height)` must be &amp;lt;= `android.sensor.info.pixelArraySize`.
6381
6382            If omitted by the HAL implementation, the camera framework will assume that this is
6383            the same as the post-correction active array region given in
6384            android.sensor.info.activeArraySize.
6385            </hal_details>
6386            <tag id="RAW" />
6387          </entry>
6388        </namespace>
6389        <entry name="referenceIlluminant1" type="byte" visibility="public"
6390               enum="true">
6391          <enum>
6392            <value id="1">DAYLIGHT</value>
6393            <value id="2">FLUORESCENT</value>
6394            <value id="3">TUNGSTEN
6395              <notes>Incandescent light</notes>
6396            </value>
6397            <value id="4">FLASH</value>
6398            <value id="9">FINE_WEATHER</value>
6399            <value id="10">CLOUDY_WEATHER</value>
6400            <value id="11">SHADE</value>
6401            <value id="12">DAYLIGHT_FLUORESCENT
6402              <notes>D 5700 - 7100K</notes>
6403            </value>
6404            <value id="13">DAY_WHITE_FLUORESCENT
6405              <notes>N 4600 - 5400K</notes>
6406            </value>
6407            <value id="14">COOL_WHITE_FLUORESCENT
6408              <notes>W 3900 - 4500K</notes>
6409            </value>
6410            <value id="15">WHITE_FLUORESCENT
6411              <notes>WW 3200 - 3700K</notes>
6412            </value>
6413            <value id="17">STANDARD_A</value>
6414            <value id="18">STANDARD_B</value>
6415            <value id="19">STANDARD_C</value>
6416            <value id="20">D55</value>
6417            <value id="21">D65</value>
6418            <value id="22">D75</value>
6419            <value id="23">D50</value>
6420            <value id="24">ISO_STUDIO_TUNGSTEN</value>
6421          </enum>
6422          <description>
6423          The standard reference illuminant used as the scene light source when
6424          calculating the android.sensor.colorTransform1,
6425          android.sensor.calibrationTransform1, and
6426          android.sensor.forwardMatrix1 matrices.
6427          </description>
6428          <details>
6429          The values in this key correspond to the values defined for the
6430          EXIF LightSource tag. These illuminants are standard light sources
          that are often used when calibrating camera devices.
6432
6433          If this key is present, then android.sensor.colorTransform1,
6434          android.sensor.calibrationTransform1, and
6435          android.sensor.forwardMatrix1 will also be present.
6436
6437          Some devices may choose to provide a second set of calibration
6438          information for improved quality, including
6439          android.sensor.referenceIlluminant2 and its corresponding matrices.
6440          </details>
6441          <hal_details>
6442          The first reference illuminant (android.sensor.referenceIlluminant1)
6443          and corresponding matrices must be present to support the RAW capability
6444          and DNG output.
6445
6446          When producing raw images with a color profile that has only been
6447          calibrated against a single light source, it is valid to omit
6448          android.sensor.referenceIlluminant2 along with the
6449          android.sensor.colorTransform2, android.sensor.calibrationTransform2,
6450          and android.sensor.forwardMatrix2 matrices.
6451
6452          If only android.sensor.referenceIlluminant1 is included, it should be
6453          chosen so that it is representative of typical scene lighting.  In
6454          general, D50 or DAYLIGHT will be chosen for this case.
6455
6456          If both android.sensor.referenceIlluminant1 and
6457          android.sensor.referenceIlluminant2 are included, they should be
6458          chosen to represent the typical range of scene lighting conditions.
          In general, a low color temperature illuminant such as Standard-A will
          be chosen for the first reference illuminant, and a higher color
          temperature illuminant such as D65 will be chosen for the second
6462          reference illuminant.
6463          </hal_details>
6464          <tag id="RAW" />
6465        </entry>
6466        <entry name="referenceIlluminant2" type="byte" visibility="public">
6467          <description>
6468          The standard reference illuminant used as the scene light source when
6469          calculating the android.sensor.colorTransform2,
6470          android.sensor.calibrationTransform2, and
6471          android.sensor.forwardMatrix2 matrices.
6472          </description>
6473          <range>Any value listed in android.sensor.referenceIlluminant1</range>
6474          <details>
6475          See android.sensor.referenceIlluminant1 for more details.
6476
6477          If this key is present, then android.sensor.colorTransform2,
6478          android.sensor.calibrationTransform2, and
6479          android.sensor.forwardMatrix2 will also be present.
6480          </details>
6481          <tag id="RAW" />
6482        </entry>
6483        <entry name="calibrationTransform1" type="rational"
6484        visibility="public" optional="true"
6485        type_notes="3x3 matrix in row-major-order" container="array"
6486        typedef="colorSpaceTransform">
6487          <array>
6488            <size>3</size>
6489            <size>3</size>
6490          </array>
6491          <description>
6492          A per-device calibration transform matrix that maps from the
6493          reference sensor colorspace to the actual device sensor colorspace.
6494          </description>
6495          <details>
6496          This matrix is used to correct for per-device variations in the
6497          sensor colorspace, and is used for processing raw buffer data.
6498
6499          The matrix is expressed as a 3x3 matrix in row-major-order, and
6500          contains a per-device calibration transform that maps colors
6501          from reference sensor color space (i.e. the "golden module"
6502          colorspace) into this camera device's native sensor color
6503          space under the first reference illuminant
6504          (android.sensor.referenceIlluminant1).
6505          </details>
6506          <tag id="RAW" />
6507        </entry>
6508        <entry name="calibrationTransform2" type="rational"
6509        visibility="public" optional="true"
6510        type_notes="3x3 matrix in row-major-order" container="array"
6511        typedef="colorSpaceTransform">
6512          <array>
6513            <size>3</size>
6514            <size>3</size>
6515          </array>
6516          <description>
6517          A per-device calibration transform matrix that maps from the
6518          reference sensor colorspace to the actual device sensor colorspace
6519          (this is the colorspace of the raw buffer data).
6520          </description>
6521          <details>
6522          This matrix is used to correct for per-device variations in the
6523          sensor colorspace, and is used for processing raw buffer data.
6524
6525          The matrix is expressed as a 3x3 matrix in row-major-order, and
6526          contains a per-device calibration transform that maps colors
6527          from reference sensor color space (i.e. the "golden module"
6528          colorspace) into this camera device's native sensor color
6529          space under the second reference illuminant
6530          (android.sensor.referenceIlluminant2).
6531
6532          This matrix will only be present if the second reference
6533          illuminant is present.
6534          </details>
6535          <tag id="RAW" />
6536        </entry>
6537        <entry name="colorTransform1" type="rational"
6538        visibility="public" optional="true"
6539        type_notes="3x3 matrix in row-major-order" container="array"
6540        typedef="colorSpaceTransform">
6541          <array>
6542            <size>3</size>
6543            <size>3</size>
6544          </array>
6545          <description>
6546          A matrix that transforms color values from CIE XYZ color space to
6547          reference sensor color space.
6548          </description>
6549          <details>
6550          This matrix is used to convert from the standard CIE XYZ color
6551          space to the reference sensor colorspace, and is used when processing
6552          raw buffer data.
6553
6554          The matrix is expressed as a 3x3 matrix in row-major-order, and
6555          contains a color transform matrix that maps colors from the CIE
6556          XYZ color space to the reference sensor color space (i.e. the
6557          "golden module" colorspace) under the first reference illuminant
6558          (android.sensor.referenceIlluminant1).
6559
6560          The white points chosen in both the reference sensor color space
6561          and the CIE XYZ colorspace when calculating this transform will
6562          match the standard white point for the first reference illuminant
6563          (i.e. no chromatic adaptation will be applied by this transform).
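
          As an illustration only, a sketch applying this transform to a CIE XYZ triple to
          obtain reference sensor color values (assuming a CameraCharacteristics instance
          `characteristics`; the key may be absent on devices without RAW support):

              // Illustrative sketch; names are assumptions, not part of the metadata.
              ColorSpaceTransform xyzToRef =
                  characteristics.get(CameraCharacteristics.SENSOR_COLOR_TRANSFORM1);
              double[] xyz = {0.9642, 1.0, 0.8249};   // example CIE XYZ input
              double[] refSensor = new double[3];
              for (int row = 0; row != 3; row++) {
                  double sum = 0.0;
                  for (int col = 0; col != 3; col++) {
                      // getElement(column, row) returns the matrix entry as a Rational.
                      sum += xyzToRef.getElement(col, row).doubleValue() * xyz[col];
                  }
                  refSensor[row] = sum;
              }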
6564          </details>
6565          <tag id="RAW" />
6566        </entry>
6567        <entry name="colorTransform2" type="rational"
6568        visibility="public" optional="true"
6569        type_notes="3x3 matrix in row-major-order" container="array"
6570        typedef="colorSpaceTransform">
6571          <array>
6572            <size>3</size>
6573            <size>3</size>
6574          </array>
6575          <description>
6576          A matrix that transforms color values from CIE XYZ color space to
6577          reference sensor color space.
6578          </description>
6579          <details>
6580          This matrix is used to convert from the standard CIE XYZ color
6581          space to the reference sensor colorspace, and is used when processing
6582          raw buffer data.
6583
6584          The matrix is expressed as a 3x3 matrix in row-major-order, and
6585          contains a color transform matrix that maps colors from the CIE
6586          XYZ color space to the reference sensor color space (i.e. the
6587          "golden module" colorspace) under the second reference illuminant
6588          (android.sensor.referenceIlluminant2).
6589
6590          The white points chosen in both the reference sensor color space
6591          and the CIE XYZ colorspace when calculating this transform will
6592          match the standard white point for the second reference illuminant
6593          (i.e. no chromatic adaptation will be applied by this transform).
6594
6595          This matrix will only be present if the second reference
6596          illuminant is present.
6597          </details>
6598          <tag id="RAW" />
6599        </entry>
6600        <entry name="forwardMatrix1" type="rational"
6601        visibility="public" optional="true"
6602        type_notes="3x3 matrix in row-major-order" container="array"
6603        typedef="colorSpaceTransform">
6604          <array>
6605            <size>3</size>
6606            <size>3</size>
6607          </array>
6608          <description>
6609          A matrix that transforms white balanced camera colors from the reference
6610          sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.
6611          </description>
6612          <details>
6613          This matrix is used to convert to the standard CIE XYZ colorspace, and
6614          is used when processing raw buffer data.
6615
6616          This matrix is expressed as a 3x3 matrix in row-major-order, and contains
6617          a color transform matrix that maps white balanced colors from the
6618          reference sensor color space to the CIE XYZ color space with a D50 white
6619          point.
6620
6621          Under the first reference illuminant (android.sensor.referenceIlluminant1)
6622          this matrix is chosen so that the standard white point for this reference
6623          illuminant in the reference sensor colorspace is mapped to D50 in the
6624          CIE XYZ colorspace.
6625          </details>
6626          <tag id="RAW" />
6627        </entry>
6628        <entry name="forwardMatrix2" type="rational"
6629        visibility="public" optional="true"
6630        type_notes="3x3 matrix in row-major-order" container="array"
6631        typedef="colorSpaceTransform">
6632          <array>
6633            <size>3</size>
6634            <size>3</size>
6635          </array>
6636          <description>
6637          A matrix that transforms white balanced camera colors from the reference
6638          sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.
6639          </description>
6640          <details>
6641          This matrix is used to convert to the standard CIE XYZ colorspace, and
6642          is used when processing raw buffer data.
6643
6644          This matrix is expressed as a 3x3 matrix in row-major-order, and contains
6645          a color transform matrix that maps white balanced colors from the
6646          reference sensor color space to the CIE XYZ color space with a D50 white
6647          point.
6648
6649          Under the second reference illuminant (android.sensor.referenceIlluminant2)
6650          this matrix is chosen so that the standard white point for this reference
6651          illuminant in the reference sensor colorspace is mapped to D50 in the
6652          CIE XYZ colorspace.
6653
6654          This matrix will only be present if the second reference
6655          illuminant is present.
6656          </details>
6657          <tag id="RAW" />
6658        </entry>
6659        <entry name="baseGainFactor" type="rational"
6660        optional="true">
6661          <description>Gain factor from electrons to raw units when
6662          ISO=100</description>
6663          <tag id="FUTURE" />
6664        </entry>
6665        <entry name="blackLevelPattern" type="int32" visibility="public"
6666        optional="true" type_notes="2x2 raw count block" container="array"
6667        typedef="blackLevelPattern">
6668          <array>
6669            <size>4</size>
6670          </array>
6671          <description>
6672          A fixed black level offset for each of the color filter arrangement
6673          (CFA) mosaic channels.
6674          </description>
6675          <range>&amp;gt;= 0 for each.</range>
6676          <details>
6677          This key specifies the zero light value for each of the CFA mosaic
6678          channels in the camera sensor.  The maximal value output by the
6679          sensor is represented by the value in android.sensor.info.whiteLevel.
6680
6681          The values are given in the same order as channels listed for the CFA
6682          layout key (see android.sensor.info.colorFilterArrangement), i.e. the
6683          nth value given corresponds to the black level offset for the nth
6684          color channel listed in the CFA.
6685
6686          The black level values of captured images may vary for different
6687          capture settings (e.g., android.sensor.sensitivity). This key
          represents a coarse approximation for such a case. It is recommended to
6689          use android.sensor.dynamicBlackLevel or use pixels from
6690          android.sensor.opticalBlackRegions directly for captures when
6691          supported by the camera device, which provides more accurate black
6692          level values. For raw capture in particular, it is recommended to use
6693          pixels from android.sensor.opticalBlackRegions to calculate black
6694          level values for each frame.
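
          As an illustration only, a sketch normalizing one raw sample using the static
          black and white levels (assuming a CameraCharacteristics instance
          `characteristics`, and that the sample's position within the 2x2 CFA block is
          known):

              // Illustrative sketch; names are assumptions, not part of the metadata.
              BlackLevelPattern pattern =
                  characteristics.get(CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN);
              int whiteLevel = characteristics.get(CameraCharacteristics.SENSOR_INFO_WHITE_LEVEL);
              int column = 0, row = 0;              // position within the 2x2 CFA block
              int black = pattern.getOffsetForIndex(column, row);
              int rawSample = 1023;                 // example value read from a raw buffer
              double normalized = (rawSample - black) / (double) (whiteLevel - black);
              normalized = Math.max(0.0, Math.min(1.0, normalized));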
6695          </details>
6696          <hal_details>
6697          The values are given in row-column scan order, with the first value
6698          corresponding to the element of the CFA in row=0, column=0.
6699          </hal_details>
6700          <tag id="RAW" />
6701        </entry>
6702        <entry name="maxAnalogSensitivity" type="int32" visibility="public"
6703               optional="true" hwlevel="full">
6704          <description>Maximum sensitivity that is implemented
6705          purely through analog gain.</description>
6706          <details>For android.sensor.sensitivity values less than or
6707          equal to this, all applied gain must be analog. For
6708          values above this, the gain applied can be a mix of analog and
6709          digital.</details>
6710          <tag id="V1" />
6711          <tag id="FULL" />
6712        </entry>
6713        <entry name="orientation" type="int32" visibility="public"
6714               hwlevel="legacy">
6715          <description>Clockwise angle through which the output image needs to be rotated to be
6716          upright on the device screen in its native orientation.
6717          </description>
6718          <units>Degrees of clockwise rotation; always a multiple of
6719          90</units>
6720          <range>0, 90, 180, 270</range>
6721          <details>
6722          Also defines the direction of rolling shutter readout, which is from top to bottom in
6723          the sensor's coordinate system.
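
          As an illustration only, a sketch of the common calculation that combines this key
          with the current device orientation to rotate an output JPEG upright (assuming a
          CameraCharacteristics instance `characteristics` and the device orientation in
          degrees in a variable named `deviceOrientation`):

              // Illustrative sketch; names are assumptions, not part of the metadata.
              int sensorOrientation =
                  characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
              // Round the device orientation to a multiple of 90.
              deviceOrientation = (deviceOrientation + 45) / 90 * 90;
              // Reverse the direction for front-facing cameras.
              Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
              if (facing == CameraMetadata.LENS_FACING_FRONT) {
                  deviceOrientation = -deviceOrientation;
              }
              // Rotation to apply to the output image so it appears upright on screen.
              int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;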
6724          </details>
6725          <tag id="BC" />
6726        </entry>
6727        <entry name="profileHueSatMapDimensions" type="int32"
6728        visibility="system" optional="true"
6729        type_notes="Number of samples for hue, saturation, and value"
6730        container="array">
6731          <array>
6732            <size>3</size>
6733          </array>
6734          <description>
6735          The number of input samples for each dimension of
6736          android.sensor.profileHueSatMap.
6737          </description>
6738          <range>
6739          Hue &amp;gt;= 1,
6740          Saturation &amp;gt;= 2,
6741          Value &amp;gt;= 1
6742          </range>
6743          <details>
6744          The number of input samples for the hue, saturation, and value
6745          dimension of android.sensor.profileHueSatMap. The order of the
6746          dimensions given is hue, saturation, value; where hue is the 0th
6747          element.
6748          </details>
6749          <tag id="RAW" />
6750        </entry>
6751      </static>
6752      <dynamic>
6753        <clone entry="android.sensor.exposureTime" kind="controls">
6754        </clone>
6755        <clone entry="android.sensor.frameDuration"
6756        kind="controls"></clone>
6757        <clone entry="android.sensor.sensitivity" kind="controls">
6758        </clone>
6759        <entry name="timestamp" type="int64" visibility="public"
6760               hwlevel="legacy">
6761          <description>Time at start of exposure of first
6762          row of the image sensor active array, in nanoseconds.</description>
6763          <units>Nanoseconds</units>
6764          <range>&amp;gt; 0</range>
6765          <details>The timestamps are also included in all image
6766          buffers produced for the same capture, and will be identical
6767          on all the outputs.
6768
6769          When android.sensor.info.timestampSource `==` UNKNOWN,
6770          the timestamps measure time since an unspecified starting point,
6771          and are monotonically increasing. They can be compared with the
6772          timestamps for other captures from the same camera device, but are
6773          not guaranteed to be comparable to any other time source.
6774
6775          When android.sensor.info.timestampSource `==` REALTIME, the
6776          timestamps measure time in the same timebase as
6777          [elapsedRealtimeNanos](https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos)
6778          (or CLOCK_BOOTTIME), and they can
6779          be compared to other timestamps from other subsystems that
6780          are using that base.
6781
6782          For reprocessing, the timestamp will match the start of exposure of
6783          the input image, i.e. {@link CaptureResult#SENSOR_TIMESTAMP the
6784          timestamp} in the TotalCaptureResult that was used to create the
6785          reprocess capture request.
6786          </details>
6787          <hal_details>
6788          All timestamps must be in reference to the kernel's
6789          CLOCK_BOOTTIME monotonic clock, which properly accounts for
6790          time spent asleep. This allows for synchronization with
6791          sensors that continue to operate while the system is
6792          otherwise asleep.
6793
6794          If android.sensor.info.timestampSource `==` REALTIME,
          the timestamp must be synchronized with the timestamps from other
6796          sensor subsystems that are using the same timebase.
6797
6798          For reprocessing, the input image's start of exposure can be looked up
6799          with android.sensor.timestamp from the metadata included in the
6800          capture request.
6801          </hal_details>
6802          <tag id="BC" />
6803        </entry>
6804        <entry name="temperature" type="float"
6805        optional="true">
6806          <description>The temperature of the sensor, sampled at the time
6807          exposure began for this frame.
6808
6809          The thermal diode being queried should be inside the sensor PCB, or
6810          somewhere close to it.
6811          </description>
6812
6813          <units>Celsius</units>
6814          <range>Optional. This value is missing if no temperature is available.</range>
6815          <tag id="FUTURE" />
6816        </entry>
6817        <entry name="neutralColorPoint" type="rational" visibility="public"
6818        optional="true" container="array">
6819          <array>
6820            <size>3</size>
6821          </array>
6822          <description>
6823          The estimated camera neutral color in the native sensor colorspace at
6824          the time of capture.
6825          </description>
6826          <details>
6827          This value gives the neutral color point encoded as an RGB value in the
6828          native sensor color space.  The neutral color point indicates the
6829          currently estimated white point of the scene illumination.  It can be
6830          used to interpolate between the provided color transforms when
6831          processing raw sensor data.
6832
6833          The order of the values is R, G, B; where R is in the lowest index.
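
          As an illustration only (one possible use, not the only one), a sketch deriving
          rough per-channel white balance gains that map the neutral point to gray
          (assuming a TotalCaptureResult named `result` from a RAW capture):

              // Illustrative sketch; names are assumptions, not part of the metadata.
              Rational[] neutral = result.get(CaptureResult.SENSOR_NEUTRAL_COLOR_POINT);
              double r = neutral[0].doubleValue();
              double g = neutral[1].doubleValue();
              double b = neutral[2].doubleValue();
              // Gains that make the neutral point equal in R, G, and B (green kept at 1.0).
              double gainR = g / r;
              double gainG = 1.0;
              double gainB = g / b;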
6834          </details>
6835          <tag id="RAW" />
6836        </entry>
6837        <entry name="noiseProfile" type="double" visibility="public"
6838        optional="true" type_notes="Pairs of noise model coefficients"
6839        container="array" typedef="pairDoubleDouble">
6840          <array>
6841            <size>2</size>
6842            <size>CFA Channels</size>
6843          </array>
6844          <description>
6845          Noise model coefficients for each CFA mosaic channel.
6846          </description>
6847          <details>
6848          This key contains two noise model coefficients for each CFA channel
6849          corresponding to the sensor amplification (S) and sensor readout
6850          noise (O).  These are given as pairs of coefficients for each channel
6851          in the same order as channels listed for the CFA layout key
6852          (see android.sensor.info.colorFilterArrangement).  This is
6853          represented as an array of Pair&amp;lt;Double, Double&amp;gt;, where
6854          the first member of the Pair at index n is the S coefficient and the
6855          second member is the O coefficient for the nth color channel in the CFA.
6856
6857          These coefficients are used in a two parameter noise model to describe
6858          the amount of noise present in the image for each CFA channel.  The
6859          noise model used here is:
6860
6861          N(x) = sqrt(Sx + O)
6862
6863          Where x represents the recorded signal of a CFA channel normalized to
          the range [0, 1], and S and O are the noise model coefficients for
6865          that channel.
6866
6867          A more detailed description of the noise model can be found in the
6868          Adobe DNG specification for the NoiseProfile tag.
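
          As an illustration only, a sketch evaluating the model above for one channel
          (assuming a TotalCaptureResult named `result`):

              // Illustrative sketch; names are assumptions, not part of the metadata.
              // Each element of SENSOR_NOISE_PROFILE is an android.util.Pair holding
              // (S, O) for one CFA channel.
              double s = result.get(CaptureResult.SENSOR_NOISE_PROFILE)[0].first;
              double o = result.get(CaptureResult.SENSOR_NOISE_PROFILE)[0].second;
              double x = 0.5;                              // signal normalized to [0, 1]
              double expectedNoise = Math.sqrt(s * x + o); // N(x) from the model above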
6869          </details>
6870          <hal_details>
6871          For a CFA layout of RGGB, the list of coefficients would be given as
6872          an array of doubles S0,O0,S1,O1,..., where S0 and O0 are the coefficients
6873          for the red channel, S1 and O1 are the coefficients for the first green
6874          channel, etc.
6875          </hal_details>
6876          <tag id="RAW" />
6877        </entry>
6878        <entry name="profileHueSatMap" type="float"
6879        visibility="system" optional="true"
6880        type_notes="Mapping for hue, saturation, and value"
6881        container="array">
6882          <array>
6883            <size>hue_samples</size>
6884            <size>saturation_samples</size>
6885            <size>value_samples</size>
6886            <size>3</size>
6887          </array>
6888          <description>
6889          A mapping containing a hue shift, saturation scale, and value scale
6890          for each pixel.
6891          </description>
6892          <units>
6893          The hue shift is given in degrees; saturation and value scale factors are
6894          unitless and are between 0 and 1 inclusive
6895          </units>
6896          <details>
6897          hue_samples, saturation_samples, and value_samples are given in
6898          android.sensor.profileHueSatMapDimensions.
6899
6900          Each entry of this map contains three floats corresponding to the
6901          hue shift, saturation scale, and value scale, respectively; where the
6902          hue shift has the lowest index. The map entries are stored in the key
6903          in nested loop order, with the value divisions in the outer loop, the
6904          hue divisions in the middle loop, and the saturation divisions in the
6905          inner loop. All zero input saturation entries are required to have a
6906          value scale factor of 1.0.
6907          </details>
6908          <tag id="RAW" />
6909        </entry>
6910        <entry name="profileToneCurve" type="float"
6911        visibility="system" optional="true"
6912        type_notes="Samples defining a spline for a tone-mapping curve"
6913        container="array">
6914          <array>
6915            <size>samples</size>
6916            <size>2</size>
6917          </array>
6918          <description>
6919          A list of x,y samples defining a tone-mapping curve for gamma adjustment.
6920          </description>
6921          <range>
6922          Each sample has an input range of `[0, 1]` and an output range of
6923          `[0, 1]`.  The first sample is required to be `(0, 0)`, and the last
6924          sample is required to be `(1, 1)`.
6925          </range>
6926          <details>
6927          This key contains a default tone curve that can be applied while
6928          processing the image as a starting point for user adjustments.
6929          The curve is specified as a list of value pairs in linear gamma.
6930          The curve is interpolated using a cubic spline.
6931          </details>
6932          <tag id="RAW" />
6933        </entry>
6934        <entry name="greenSplit" type="float" visibility="public" optional="true">
6935          <description>
6936          The worst-case divergence between Bayer green channels.
6937          </description>
6938          <range>
6939          &amp;gt;= 0
6940          </range>
6941          <details>
6942          This value is an estimate of the worst case split between the
6943          Bayer green channels in the red and blue rows in the sensor color
6944          filter array.
6945
6946          The green split is calculated as follows:
6947
6948          1. A 5x5 pixel (or larger) window W within the active sensor array is
6949          chosen. The term 'pixel' here is taken to mean a group of 4 Bayer
6950          mosaic channels (R, Gr, Gb, B).  The location and size of the window
6951          chosen is implementation defined, and should be chosen to provide a
6952          green split estimate that is both representative of the entire image
6953          for this camera sensor, and can be calculated quickly.
6954          1. The arithmetic mean of the green channels from the red
6955          rows (mean_Gr) within W is computed.
6956          1. The arithmetic mean of the green channels from the blue
6957          rows (mean_Gb) within W is computed.
6958          1. The maximum ratio R of the two means is computed as follows:
6959          `R = max((mean_Gr + 1)/(mean_Gb + 1), (mean_Gb + 1)/(mean_Gr + 1))`
6960
6961          The ratio R is the green split divergence reported for this property,
6962          which represents how much the green channels differ in the mosaic
6963          pattern.  This value is typically used to determine the treatment of
6964          the green mosaic channels when demosaicing.
6965
6966          The green split value can be roughly interpreted as follows:
6967
6968          * R &amp;lt; 1.03 is a negligible split (&amp;lt;3% divergence).
          * 1.03 &amp;lt;= R &amp;lt;= 1.20 will require some software
          correction to avoid demosaic errors (3-20% divergence).
          * R &amp;gt; 1.20 will require strong software correction to produce
          a usable image (&amp;gt;20% divergence).
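
          As an illustration only, a sketch of the ratio defined above, given window means
          `meanGr` and `meanGb` (assumed to have been computed already), and its
          interpretation against the thresholds listed here:

              // Illustrative sketch; names are assumptions, not part of the metadata.
              double r = Math.max((meanGr + 1.0) / (meanGb + 1.0),
                                  (meanGb + 1.0) / (meanGr + 1.0));
              boolean needsCorrection = r > 1.03;        // 3% or more divergence
              boolean needsStrongCorrection = r > 1.20;  // 20% or more divergence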
6973          </details>
6974          <hal_details>
6975          The green split given may be a static value based on prior
6976          characterization of the camera sensor using the green split
6977          calculation method given here over a large, representative, sample
6978          set of images.  Other methods of calculation that produce equivalent
6979          results, and can be interpreted in the same manner, may be used.
6980          </hal_details>
6981          <tag id="RAW" />
6982        </entry>
6983      </dynamic>
6984      <controls>
6985        <entry name="testPatternData" type="int32" visibility="public" optional="true" container="array">
6986          <array>
6987            <size>4</size>
6988          </array>
6989          <description>
6990            A pixel `[R, G_even, G_odd, B]` that supplies the test pattern
6991            when android.sensor.testPatternMode is SOLID_COLOR.
6992          </description>
6993          <details>
6994          Each color channel is treated as an unsigned 32-bit integer.
          The camera device then uses the most significant bits,
          where the number of bits used matches the bit depth of its Bayer
          raw sensor output.
6998
6999          For example, a sensor with RAW10 Bayer output would use the
7000          10 most significant bits from each color channel.
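
          As an illustration only, a sketch of the bit selection described above for a
          RAW10 sensor:

              // Illustrative sketch; names are assumptions, not part of the metadata.
              int channelValue = 0xFFFFFFFF;     // 100% intensity, unsigned 32-bit value
              int rawBits = 10;                  // bit depth of the Bayer raw output
              int emitted = channelValue >>> (32 - rawBits);   // 10 most significant bits
              // emitted == 1023 for a fully saturated channel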
7001          </details>
7002          <hal_details>
7003          </hal_details>
7004        </entry>
7005        <entry name="testPatternMode" type="int32" visibility="public" optional="true"
7006          enum="true">
7007          <enum>
7008            <value>OFF
7009              <notes>No test pattern mode is used, and the camera
7010              device returns captures from the image sensor.
7011
7012              This is the default if the key is not set.</notes>
7013            </value>
7014            <value>SOLID_COLOR
7015              <notes>
7016              Each pixel in `[R, G_even, G_odd, B]` is replaced by its
7017              respective color channel provided in
7018              android.sensor.testPatternData.
7019
7020              For example:
7021
7022                  android.testPatternData = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0]
7023
7024              All green pixels are 100% green. All red/blue pixels are black.
7025
7026                  android.testPatternData = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0]
7027
7028              All red pixels are 100% red. Only the odd green pixels
7029              are 100% green. All blue pixels are 100% black.
7030              </notes>
7031            </value>
7032            <value>COLOR_BARS
7033              <notes>
7034              All pixel data is replaced with an 8-bar color pattern.
7035
7036              The vertical bars (left-to-right) are as follows:
7037
7038              * 100% white
7039              * yellow
7040              * cyan
7041              * green
7042              * magenta
7043              * red
7044              * blue
7045              * black
7046
7047              In general the image would look like the following:
7048
7049                 W Y C G M R B K
7050                 W Y C G M R B K
7051                 W Y C G M R B K
7052                 W Y C G M R B K
7053                 W Y C G M R B K
7054                 . . . . . . . .
7055                 . . . . . . . .
7056                 . . . . . . . .
7057
7058                 (B = Blue, K = Black)
7059
7060             Each bar should take up 1/8 of the sensor pixel array width.
7061             When this is not possible, the bar size should be rounded
7062             down to the nearest integer and the pattern can repeat
7063             on the right side.
7064
7065             Each bar's height must always take up the full sensor
7066             pixel array height.
7067
7068             Each pixel in this test pattern must be set to either
7069             0% intensity or 100% intensity.
7070             </notes>
7071            </value>
7072            <value>COLOR_BARS_FADE_TO_GRAY
7073              <notes>
7074              The test pattern is similar to COLOR_BARS, except that
7075              each bar should start at its specified color at the top,
7076              and fade to gray at the bottom.
7077
              Each bar is further subdivided into a left and a
              right half. The left half should have a smooth gradient,
7080              and the right half should have a quantized gradient.
7081
              In particular, the right half should consist of blocks of the
              same color, each 1/16th of the active sensor pixel array width.
7084
7085              The least significant bits in the quantized gradient should
7086              be copied from the most significant bits of the smooth gradient.
7087
7088              The height of each bar should always be a multiple of 128.
7089              When this is not the case, the pattern should repeat at the bottom
7090              of the image.
7091              </notes>
7092            </value>
7093            <value>PN9
7094              <notes>
7095              All pixel data is replaced by a pseudo-random sequence
7096              generated from a PN9 512-bit sequence (typically implemented
7097              in hardware with a linear feedback shift register).
7098
7099              The generator should be reset at the beginning of each frame,
7100              and thus each subsequent raw frame with this test pattern should
7101              be exactly the same as the last.
7102              </notes>
7103            </value>
7104            <value id="256">CUSTOM1
7105              <notes>The first custom test pattern. All custom patterns that are
7106              available only on this camera device are at least this numeric
7107              value.
7108
7109              All of the custom test patterns will be static
7110              (that is the raw image must not vary from frame to frame).
7111              </notes>
7112            </value>
7113          </enum>
7114          <description>When enabled, the sensor sends a test pattern instead of
7115          doing a real exposure from the camera.
7116          </description>
7117          <range>android.sensor.availableTestPatternModes</range>
7118          <details>
7119          When a test pattern is enabled, all manual sensor controls specified
7120          by android.sensor.* will be ignored. All other controls should
7121          work as normal.
7122
7123          For example, if manual flash is enabled, flash firing should still
          occur (and the test pattern will remain unmodified, since the flash
7125          would not actually affect it).
7126
7127          Defaults to OFF.
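
          As an illustration only, a sketch enabling the SOLID_COLOR pattern after checking
          that it is advertised (assuming a CameraCharacteristics instance
          `characteristics` and a CaptureRequest.Builder named `builder`):

              // Illustrative sketch; names are assumptions, not part of the metadata.
              int[] modes = characteristics.get(
                  CameraCharacteristics.SENSOR_AVAILABLE_TEST_PATTERN_MODES);
              boolean solidColorSupported = false;
              for (int mode : modes) {
                  if (mode == CameraMetadata.SENSOR_TEST_PATTERN_MODE_SOLID_COLOR) {
                      solidColorSupported = true;
                  }
              }
              if (solidColorSupported) {
                  // All-green test frames: [R, G_even, G_odd, B]
                  builder.set(CaptureRequest.SENSOR_TEST_PATTERN_MODE,
                      CameraMetadata.SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);
                  builder.set(CaptureRequest.SENSOR_TEST_PATTERN_DATA,
                      new int[] {0, 0xFFFFFFFF, 0xFFFFFFFF, 0});
              }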
7128          </details>
7129          <hal_details>
7130          All test patterns are specified in the Bayer domain.
7131
7132          The HAL may choose to substitute test patterns from the sensor
7133          with test patterns from on-device memory. In that case, it should be
7134          indistinguishable to the ISP whether the data came from the
7135          sensor interconnect bus (such as CSI2) or memory.
7136          </hal_details>
7137        </entry>
7138      </controls>
7139      <dynamic>
7140        <clone entry="android.sensor.testPatternData" kind="controls">
7141        </clone>
7142        <clone entry="android.sensor.testPatternMode" kind="controls">
7143        </clone>
7144      </dynamic>
7145      <static>
7146        <entry name="availableTestPatternModes" type="int32" visibility="public" optional="true"
7147          type_notes="list of enums" container="array">
7148          <array>
7149            <size>n</size>
7150          </array>
7151          <description>List of sensor test pattern modes for android.sensor.testPatternMode
7152          supported by this camera device.
7153          </description>
7154          <range>Any value listed in android.sensor.testPatternMode</range>
7155          <details>
7156            Defaults to OFF, and always includes OFF if defined.
7157          </details>
7158          <hal_details>
7159            All custom modes must be >= CUSTOM1.
7160          </hal_details>
7161        </entry>
7162      </static>
7163      <dynamic>
7164        <entry name="rollingShutterSkew" type="int64" visibility="public" hwlevel="limited">
7165          <description>Duration between the start of first row exposure
7166          and the start of last row exposure.</description>
7167          <units>Nanoseconds</units>
7168          <range> &amp;gt;= 0 and &amp;lt;
7169          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}.</range>
7170          <details>
7171          This is the exposure time skew between the first and last
7172          row exposure start times. The first row and the last row are
7173          the first and last rows inside of the
7174          android.sensor.info.activeArraySize.
7175
7176          For typical camera sensors that use rolling shutters, this is also equivalent
7177          to the frame readout time.
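
          As an illustration only, a sketch estimating when a given sensor row started
          exposing, treating row start times as evenly spaced across the active array
          (assuming a CameraCharacteristics instance `characteristics` and a
          TotalCaptureResult named `result`):

              // Illustrative sketch; names are assumptions, not part of the metadata.
              long skewNs = result.get(CaptureResult.SENSOR_ROLLING_SHUTTER_SKEW);
              Rect activeArray =
                  characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
              int rows = activeArray.height();
              int row = rows / 2;                          // e.g. the middle row
              long rowStartOffsetNs = (row * skewNs) / (rows - 1);
              long rowStartNs = result.get(CaptureResult.SENSOR_TIMESTAMP) + rowStartOffsetNs;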
7178          </details>
7179          <hal_details>
7180          The HAL must report `0` if the sensor is using global shutter, where all pixels begin
7181          exposure at the same time.
7182          </hal_details>
7183          <tag id="V1" />
7184        </entry>
7185      </dynamic>
7186      <static>
7187        <entry name="opticalBlackRegions" type="int32" visibility="public" optional="true"
7188          container="array" typedef="rectangle">
7189          <array>
7190            <size>4</size>
7191            <size>num_regions</size>
7192          </array>
7193          <description>List of disjoint rectangles indicating the sensor
7194          optically shielded black pixel regions.
7195          </description>
7196          <details>
7197            In most camera sensors, the active array is surrounded by some
7198            optically shielded pixel areas. By blocking light, these pixels
            provide a reliable black reference for black level compensation
            in the active array region.
7201
7202            The data representation is int[4], which maps to (left, top, width, height).
7203
7204            This key provides a list of disjoint rectangles specifying the
7205            regions of optically shielded (with metal shield) black pixel
7206            regions if the camera device is capable of reading out these black
7207            pixels in the output raw images. In comparison to the fixed black
7208            level values reported by android.sensor.blackLevelPattern, this key
7209            may provide a more accurate way for the application to calculate
            the black level of each captured raw image.
7211
7212            When this key is reported, the android.sensor.dynamicBlackLevel and
7213            android.sensor.dynamicWhiteLevel will also be reported.
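
            As an illustration only, a sketch averaging the samples inside the first
            reported region to obtain a per-frame black reference. The buffer layout (one
            16-bit sample per pixel in `rawSamples`, with `rowStrideSamples` samples per
            row) is an assumption, not part of this definition:

                // Illustrative sketch; names are assumptions, not part of the metadata.
                Rect[] regions =
                    characteristics.get(CameraCharacteristics.SENSOR_OPTICAL_BLACK_REGIONS);
                Rect region = regions[0];
                long sum = 0;
                long count = 0;
                for (int y = region.top; y != region.bottom; y++) {
                    for (int x = region.left; x != region.right; x++) {
                        sum += Short.toUnsignedInt(rawSamples[y * rowStrideSamples + x]);
                        count++;
                    }
                }
                double frameBlackLevel = sum / (double) count;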
7214          </details>
7215          <hal_details>
7216            This array contains (xmin, ymin, width, height). The (xmin, ymin)
7217            must be &amp;gt;= (0,0) and &amp;lt;=
7218            android.sensor.info.pixelArraySize. The (width, height) must be
7219            &amp;lt;= android.sensor.info.pixelArraySize. Each region must be
7220            outside the region reported by
7221            android.sensor.info.preCorrectionActiveArraySize.
7222
            The HAL must report the minimal number of disjoint regions for the
            optically shielded black pixel regions. For example, if a region can
7225            be covered by one rectangle, the HAL must not split this region into
7226            multiple rectangles.
7227          </hal_details>
7228        </entry>
7229      </static>
7230      <dynamic>
7231        <entry name="dynamicBlackLevel" type="float" visibility="public"
7232        optional="true" type_notes="2x2 raw count block" container="array">
7233          <array>
7234            <size>4</size>
7235          </array>
7236          <description>
7237          A per-frame dynamic black level offset for each of the color filter
7238          arrangement (CFA) mosaic channels.
7239          </description>
7240          <range>&amp;gt;= 0 for each.</range>
7241          <details>
7242          Camera sensor black levels may vary dramatically for different
7243          capture settings (e.g. android.sensor.sensitivity). The fixed black
7244          level reported by android.sensor.blackLevelPattern may be too
7245          inaccurate to represent the actual value on a per-frame basis. The
7246          camera device internal pipeline relies on reliable black level values
7247          to process the raw images appropriately. To get the best image
7248          quality, the camera device may choose to estimate the per frame black
7249          level values either based on optically shielded black regions
7250          (android.sensor.opticalBlackRegions) or its internal model.
7251
7252          This key reports the camera device estimated per-frame zero light
7253          value for each of the CFA mosaic channels in the camera sensor. The
7254          android.sensor.blackLevelPattern may only represent a coarse
7255          approximation of the actual black level values. This value is the
          black level used in the camera device's internal image processing pipeline
          and is generally more accurate than the fixed black level values.
7258          However, since they are estimated values by the camera device, they
7259          may not be as accurate as the black level values calculated from the
7260          optical black pixels reported by android.sensor.opticalBlackRegions.
7261
7262          The values are given in the same order as channels listed for the CFA
7263          layout key (see android.sensor.info.colorFilterArrangement), i.e. the
7264          nth value given corresponds to the black level offset for the nth
7265          color channel listed in the CFA.
7266
7267          This key will be available if android.sensor.opticalBlackRegions is
7268          available or the camera device advertises this key via
7269          {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.
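
          As an illustration only, a sketch preferring the per-frame estimates when present
          and falling back to the static values otherwise (assuming a CameraCharacteristics
          instance `characteristics` and a TotalCaptureResult named `result`):

              // Illustrative sketch; names are assumptions, not part of the metadata.
              float[] dynamicBlack = result.get(CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL);
              Integer dynamicWhite = result.get(CaptureResult.SENSOR_DYNAMIC_WHITE_LEVEL);
              float blackChannel0;
              if (dynamicBlack != null) {
                  blackChannel0 = dynamicBlack[0];          // per-frame estimate, channel 0
              } else {
                  BlackLevelPattern pattern =
                      characteristics.get(CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN);
                  blackChannel0 = pattern.getOffsetForIndex(0, 0);   // static fallback
              }
              int white = (dynamicWhite != null)
                  ? dynamicWhite
                  : characteristics.get(CameraCharacteristics.SENSOR_INFO_WHITE_LEVEL);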
7270          </details>
7271          <hal_details>
7272          The values are given in row-column scan order, with the first value
7273          corresponding to the element of the CFA in row=0, column=0.
7274          </hal_details>
7275          <tag id="RAW" />
7276        </entry>
7277        <entry name="dynamicWhiteLevel" type="int32" visibility="public"
7278        optional="true" >
7279          <description>
7280          Maximum raw value output by sensor for this frame.
7281          </description>
7282          <range> &amp;gt;= 0</range>
7283          <details>
7284          Since the android.sensor.blackLevelPattern may change for different
7285          capture settings (e.g., android.sensor.sensitivity), the white
7286          level will change accordingly. This key is similar to
7287          android.sensor.info.whiteLevel, but specifies the camera device
7288          estimated white level for each frame.
7289
7290          This key will be available if android.sensor.opticalBlackRegions is
7291          available or the camera device advertises this key via
7292          {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.
7293          </details>
7294          <hal_details>
7295          The full bit depth of the sensor must be available in the raw data,
7296          so the value for linear sensors should not be significantly lower
          than the maximum raw value supported, i.e. 2^(sensor bits per pixel).
7298          </hal_details>
7299          <tag id="RAW" />
7300        </entry>
7301      </dynamic>
7302      <static>
7303        <entry name="opaqueRawSize" type="int32" visibility="system" container="array">
7304          <array>
7305            <size>n</size>
7306            <size>3</size>
7307          </array>
7308          <description>Size in bytes for all the listed opaque RAW buffer sizes</description>
7309          <range>Must be large enough to fit the opaque RAW of corresponding size produced by
7310          the camera</range>
7311          <details>
          These configurations are listed as `(width, height, size_in_bytes)` tuples.
7313          This is used for sizing the gralloc buffers for opaque RAW buffers.
7314          All RAW_OPAQUE output stream configuration listed in
7315          android.scaler.availableStreamConfigurations will have a corresponding tuple in
7316          this key.
7317          </details>
7318          <hal_details>
7319              This key is added in HAL3.4.
7320              For HAL3.4 or above: devices advertising RAW_OPAQUE format output must list this key.
              For HAL3.3 or earlier devices: if RAW_OPAQUE output is advertised, the camera framework
7322              will derive this key by assuming each pixel takes two bytes and no padding bytes
7323              between rows.
7324          </hal_details>
7325        </entry>
7326      </static>
7327    </section>
7328    <section name="shading">
7329      <controls>
7330        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
7331          <enum>
7332            <value>OFF
7333            <notes>No lens shading correction is applied.</notes></value>
7334            <value>FAST
7335            <notes>Apply lens shading corrections, without slowing
7336            frame rate relative to sensor raw output</notes></value>
7337            <value>HIGH_QUALITY
7338            <notes>Apply high-quality lens shading correction, at the
7339            cost of possibly reduced frame rate.</notes></value>
7340          </enum>
7341          <description>Quality of lens shading correction applied
7342          to the image data.</description>
7343          <range>android.shading.availableModes</range>
7344          <details>
          When set to OFF mode, no lens shading correction will be applied by the
          camera device, and identity lens shading map data will be provided
          if `android.statistics.lensShadingMapMode == ON`. For example, for a lens
          shading map of size `[ 4, 3 ]`,
          the output android.statistics.lensShadingCorrectionMap for this case will be the identity
          map shown below:
7351
7352              [ 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
7353               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
7354               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
7355               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
7356               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
7357               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0 ]
7358
7359          When set to other modes, lens shading correction will be applied by the camera
7360          device. Applications can request lens shading map data by setting
7361          android.statistics.lensShadingMapMode to ON, and then the camera device will provide lens
7362          shading map data in android.statistics.lensShadingCorrectionMap; the returned shading map
7363          data will be the one applied by the camera device for this capture request.
7364
          The shading map data may depend on the auto-exposure (AE) and AWB statistics; therefore,
          the reliability of the map data may be affected by the AE and AWB algorithms. When AE and
          AWB are in AUTO modes (android.control.aeMode `!=` OFF and android.control.awbMode `!=`
          OFF), to get the best results, it is recommended that applications wait for the AE and AWB
          to converge before using the returned shading map data.
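
          For example, a minimal sketch using the Java camera2 SDK (illustration only; the
          helper name is hypothetical) that disables shading correction while still requesting
          the shading map, so that the identity map above is returned:

              // Disable lens shading correction but keep shading map output enabled.
              void requestIdentityShadingMap(CaptureRequest.Builder builder) {
                  builder.set(CaptureRequest.SHADING_MODE,
                          CameraMetadata.SHADING_MODE_OFF);
                  builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
                          CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON);
              }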
7370          </details>
7371        </entry>
7372        <entry name="strength" type="byte">
7373          <description>Control the amount of shading correction
7374          applied to the images</description>
7375          <units>unitless: 1-10; 10 is full shading
7376          compensation</units>
7377          <tag id="FUTURE" />
7378        </entry>
7379      </controls>
7380      <dynamic>
7381        <clone entry="android.shading.mode" kind="controls">
7382        </clone>
7383      </dynamic>
7384      <static>
7385        <entry name="availableModes" type="byte" visibility="public"
7386            type_notes="List of enums (android.shading.mode)." container="array"
7387            typedef="enumList" hwlevel="legacy">
7388          <array>
7389            <size>n</size>
7390          </array>
7391          <description>
7392          List of lens shading modes for android.shading.mode that are supported by this camera device.
7393          </description>
7394          <range>Any value listed in android.shading.mode</range>
7395          <details>
7396              This list contains lens shading modes that can be set for the camera device.
7397              Camera devices that support the MANUAL_POST_PROCESSING capability will always
7398              list OFF and FAST mode. This includes all FULL level devices.
7399              LEGACY devices will always only support FAST mode.
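
              As a sketch only (Java camera2 SDK; the helper name is hypothetical), an
              application can check this list before requesting HIGH_QUALITY:

                  // Returns true if HIGH_QUALITY lens shading correction is listed.
                  boolean supportsHighQualityShading(CameraCharacteristics c) {
                      int[] modes = c.get(CameraCharacteristics.SHADING_AVAILABLE_MODES);
                      if (modes == null) return false;
                      for (int mode : modes) {
                          if (mode == CameraMetadata.SHADING_MODE_HIGH_QUALITY) return true;
                      }
                      return false;
                  }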
7400          </details>
7401          <hal_details>
7402            HAL must support both FAST and HIGH_QUALITY if lens shading correction control is
7403            available on the camera device, but the underlying implementation can be the same for
7404            both modes. That is, if the highest quality implementation on the camera device does not
7405            slow down capture rate, then FAST and HIGH_QUALITY will generate the same output.
7406          </hal_details>
7407        </entry>
7408      </static>
7409    </section>
7410    <section name="statistics">
7411      <controls>
7412        <entry name="faceDetectMode" type="byte" visibility="public" enum="true"
7413               hwlevel="legacy">
7414          <enum>
7415            <value>OFF
7416            <notes>Do not include face detection statistics in capture
7417            results.</notes></value>
7418            <value optional="true">SIMPLE
7419            <notes>Return face rectangle and confidence values only.
7420            </notes></value>
7421            <value optional="true">FULL
7422            <notes>Return all face
7423            metadata.
7424
7425            In this mode, face rectangles, scores, landmarks, and face IDs are all valid.
7426            </notes></value>
7427          </enum>
7428          <description>Operating mode for the face detector
7429          unit.</description>
7430          <range>android.statistics.info.availableFaceDetectModes</range>
7431          <details>Whether face detection is enabled, and whether it
7432          should output just the basic fields or the full set of
7433          fields.</details>
7434          <hal_details>
7435            SIMPLE mode must fill in android.statistics.faceRectangles and
7436            android.statistics.faceScores.
7437            FULL mode must also fill in android.statistics.faceIds, and
7438            android.statistics.faceLandmarks.
7439          </hal_details>
7440          <tag id="BC" />
7441        </entry>
7442        <entry name="histogramMode" type="byte" enum="true" typedef="boolean">
7443          <enum>
7444            <value>OFF</value>
7445            <value>ON</value>
7446          </enum>
7447          <description>Operating mode for histogram
7448          generation</description>
7449          <tag id="FUTURE" />
7450        </entry>
7451        <entry name="sharpnessMapMode" type="byte" enum="true" typedef="boolean">
7452          <enum>
7453            <value>OFF</value>
7454            <value>ON</value>
7455          </enum>
7456          <description>Operating mode for sharpness map
7457          generation</description>
7458          <tag id="FUTURE" />
7459        </entry>
7460        <entry name="hotPixelMapMode" type="byte" visibility="public" enum="true"
7461        typedef="boolean">
7462          <enum>
7463            <value>OFF
7464            <notes>Hot pixel map production is disabled.
7465            </notes></value>
7466            <value>ON
7467            <notes>Hot pixel map production is enabled.
7468            </notes></value>
7469          </enum>
7470          <description>
7471          Operating mode for hot pixel map generation.
7472          </description>
7473          <range>android.statistics.info.availableHotPixelMapModes</range>
7474          <details>
7475          If set to `true`, a hot pixel map is returned in android.statistics.hotPixelMap.
7476          If set to `false`, no hot pixel map will be returned.
7477          </details>
7478          <tag id="V1" />
7479          <tag id="RAW" />
7480        </entry>
7481      </controls>
7482      <static>
7483        <namespace name="info">
7484          <entry name="availableFaceDetectModes" type="byte"
7485                 visibility="public"
7486                 type_notes="List of enums from android.statistics.faceDetectMode"
7487                 container="array"
7488                 typedef="enumList"
7489                 hwlevel="legacy">
7490            <array>
7491              <size>n</size>
7492            </array>
7493            <description>List of face detection modes for android.statistics.faceDetectMode that are
7494            supported by this camera device.
7495            </description>
7496            <range>Any value listed in android.statistics.faceDetectMode</range>
7497            <details>OFF is always supported.
7498            </details>
7499          </entry>
7500          <entry name="histogramBucketCount" type="int32">
7501            <description>Number of histogram buckets
7502            supported</description>
7503            <range>&amp;gt;= 64</range>
7504            <tag id="FUTURE" />
7505          </entry>
7506          <entry name="maxFaceCount" type="int32" visibility="public" hwlevel="legacy">
7507            <description>The maximum number of simultaneously detectable
7508            faces.</description>
7509            <range>0 for cameras without available face detection; otherwise:
7510            `&gt;=4` for LIMITED or FULL hwlevel devices or
7511            `&gt;0` for LEGACY devices.</range>
7512            <tag id="BC" />
7513          </entry>
7514          <entry name="maxHistogramCount" type="int32">
7515            <description>Maximum value possible for a histogram
7516            bucket</description>
7517            <tag id="FUTURE" />
7518          </entry>
7519          <entry name="maxSharpnessMapValue" type="int32">
7520            <description>Maximum value possible for a sharpness map
7521            region.</description>
7522            <tag id="FUTURE" />
7523          </entry>
7524          <entry name="sharpnessMapSize" type="int32"
7525          type_notes="width x height" container="array" typedef="size">
7526            <array>
7527              <size>2</size>
7528            </array>
7529            <description>Dimensions of the sharpness
7530            map</description>
7531            <range>Must be at least 32 x 32</range>
7532            <tag id="FUTURE" />
7533          </entry>
7534          <entry name="availableHotPixelMapModes" type="byte" visibility="public"
7535                 type_notes="list of enums" container="array" typedef="boolean">
7536            <array>
7537              <size>n</size>
7538            </array>
7539            <description>
7540            List of hot pixel map output modes for android.statistics.hotPixelMapMode that are
7541            supported by this camera device.
7542            </description>
7543            <range>Any value listed in android.statistics.hotPixelMapMode</range>
7544            <details>
            If no hot pixel map output is available for this camera device, this will contain only
7546            `false`.
7547
7548            ON is always supported on devices with the RAW capability.
7549            </details>
7550            <tag id="V1" />
7551            <tag id="RAW" />
7552          </entry>
7553          <entry name="availableLensShadingMapModes" type="byte" visibility="public"
7554                 type_notes="list of enums" container="array" typedef="enumList">
7555            <array>
7556              <size>n</size>
7557            </array>
7558            <description>
7559            List of lens shading map output modes for android.statistics.lensShadingMapMode that
7560            are supported by this camera device.
7561            </description>
7562            <range>Any value listed in android.statistics.lensShadingMapMode</range>
7563            <details>
7564            If no lens shading map output is available for this camera device, this key will
7565            contain only OFF.
7566
7567            ON is always supported on devices with the RAW capability.
7568            LEGACY mode devices will always only support OFF.
7569            </details>
7570          </entry>
7571        </namespace>
7572      </static>
7573      <dynamic>
7574        <clone entry="android.statistics.faceDetectMode"
7575               kind="controls"></clone>
7576        <entry name="faceIds" type="int32" visibility="ndk_public"
7577               container="array" hwlevel="legacy">
7578          <array>
7579            <size>n</size>
7580          </array>
7581          <description>List of unique IDs for detected faces.</description>
7582          <details>
7583          Each detected face is given a unique ID that is valid for as long as the face is visible
7584          to the camera device.  A face that leaves the field of view and later returns may be
7585          assigned a new ID.
7586
7587          Only available if android.statistics.faceDetectMode == FULL</details>
7588          <tag id="BC" />
7589        </entry>
7590        <entry name="faceLandmarks" type="int32" visibility="ndk_public"
7591               type_notes="(leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouthX, mouthY)"
7592               container="array" hwlevel="legacy">
7593          <array>
7594            <size>n</size>
7595            <size>6</size>
7596          </array>
7597          <description>List of landmarks for detected
7598          faces.</description>
7599          <details>
7600            The coordinate system is that of android.sensor.info.activeArraySize, with
7601            `(0, 0)` being the top-left pixel of the active array.
7602
7603            Only available if android.statistics.faceDetectMode == FULL</details>
7604          <tag id="BC" />
7605        </entry>
7606        <entry name="faceRectangles" type="int32" visibility="ndk_public"
7607               type_notes="(xmin, ymin, xmax, ymax). (0,0) is top-left of active pixel area"
7608               container="array" typedef="rectangle" hwlevel="legacy">
7609          <array>
7610            <size>n</size>
7611            <size>4</size>
7612          </array>
7613          <description>List of the bounding rectangles for detected
7614          faces.</description>
7615          <details>
7616            The data representation is int[4], which maps to (left, top, width, height).
7617
7618            The coordinate system is that of android.sensor.info.activeArraySize, with
7619            `(0, 0)` being the top-left pixel of the active array.
7620
7621            Only available if android.statistics.faceDetectMode != OFF</details>
7622          <tag id="BC" />
7623        </entry>
7624        <entry name="faceScores" type="byte" visibility="ndk_public"
7625               container="array" hwlevel="legacy">
7626          <array>
7627            <size>n</size>
7628          </array>
7629          <description>List of the face confidence scores for
7630          detected faces</description>
7631          <range>1-100</range>
7632          <details>Only available if android.statistics.faceDetectMode != OFF.
7633          </details>
7634          <hal_details>
7635          The value should be meaningful (for example, setting 100 at
7636          all times is illegal).</hal_details>
7637          <tag id="BC" />
7638        </entry>
7639        <entry name="faces" type="int32" visibility="java_public" synthetic="true"
7640               container="array" typedef="face" hwlevel="legacy">
7641          <array>
7642            <size>n</size>
7643          </array>
7644          <description>List of the faces detected through camera face detection
7645          in this capture.</description>
7646          <details>
7647          Only available if android.statistics.faceDetectMode `!=` OFF.
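
          For example (Java camera2 SDK; illustration only, helper name hypothetical), the
          detected faces can be read from a capture result as follows:

              // Face bounds are in the android.sensor.info.activeArraySize coordinate
              // system; scores are in the range 1-100.
              void logFaces(TotalCaptureResult result) {
                  Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
                  if (faces == null) return;
                  for (Face face : faces) {
                      // face.getId() and landmark positions are only valid in FULL mode.
                      Log.d("Faces", "Face " + face.getBounds() + " score=" + face.getScore());
                  }
              }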
7648          </details>
7649        </entry>
7650        <entry name="histogram" type="int32"
7651        type_notes="count of pixels for each color channel that fall into each histogram bucket, scaled to be between 0 and maxHistogramCount"
7652        container="array">
7653          <array>
7654            <size>n</size>
7655            <size>3</size>
7656          </array>
7657          <description>A 3-channel histogram based on the raw
7658          sensor data</description>
7659          <details>The k'th bucket (0-based) covers the input range
7660          (with w = android.sensor.info.whiteLevel) of [ k * w/N,
          (k + 1) * w / N ). If only a monochrome histogram is
          supported, all channels should have the same data.</details>
7663          <tag id="FUTURE" />
7664        </entry>
7665        <clone entry="android.statistics.histogramMode"
7666        kind="controls"></clone>
7667        <entry name="sharpnessMap" type="int32"
7668        type_notes="estimated sharpness for each region of the input image. Normalized to be between 0 and maxSharpnessMapValue. Higher values mean sharper (better focused)"
7669        container="array">
7670          <array>
7671            <size>n</size>
7672            <size>m</size>
7673            <size>3</size>
7674          </array>
7675          <description>A 3-channel sharpness map, based on the raw
7676          sensor data</description>
7677          <details>If only a monochrome sharpness map is supported,
7678          all channels should have the same data</details>
7679          <tag id="FUTURE" />
7680        </entry>
7681        <clone entry="android.statistics.sharpnessMapMode"
7682               kind="controls"></clone>
7683        <entry name="lensShadingCorrectionMap" type="byte" visibility="java_public"
7684               typedef="lensShadingMap" hwlevel="full">
7685          <description>The shading map is a low-resolution floating-point map
7686          that lists the coefficients used to correct for vignetting, for each
7687          Bayer color channel.</description>
7688          <range>Each gain factor is &amp;gt;= 1</range>
7689          <details>
7690          The map provided here is the same map that is used by the camera device to
7691          correct both color shading and vignetting for output non-RAW images.
7692
7693          When there is no lens shading correction applied to RAW
7694          output images (android.sensor.info.lensShadingApplied `==`
7695          false), this map is the complete lens shading correction
7696          map; when there is some lens shading correction applied to
7697          the RAW output image (android.sensor.info.lensShadingApplied
7698          `==` true), this map reports the remaining lens shading
7699          correction map that needs to be applied to get shading
7700          corrected images that match the camera device's output for
7701          non-RAW formats.
7702
7703          For a complete shading correction map, the least shaded
7704          section of the image will have a gain factor of 1; all
7705          other sections will have gains above 1.
7706
7707          When android.colorCorrection.mode = TRANSFORM_MATRIX, the map
7708          will take into account the colorCorrection settings.
7709
7710          The shading map is for the entire active pixel array, and is not
7711          affected by the crop region specified in the request. Each shading map
7712          entry is the value of the shading compensation map over a specific
7713          pixel on the sensor.  Specifically, with a (N x M) resolution shading
7714          map, and an active pixel array size (W x H), shading map entry
7715          (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
7716          pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
7717          The map is assumed to be bilinearly interpolated between the sample points.
7718
7719          The channel order is [R, Geven, Godd, B], where Geven is the green
7720          channel for the even rows of a Bayer pattern, and Godd is the odd rows.
7721          The shading map is stored in a fully interleaved format.
7722
7723          The shading map will generally have on the order of 30-40 rows and columns,
7724          and will be smaller than 64x64.
7725
7726          As an example, given a very small map defined as:
7727
7728              width,height = [ 4, 3 ]
7729              values =
7730              [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
7731                  1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
7732                1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
7733                  1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
7734                1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
7735                  1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
7736
7737          The low-resolution scaling map images for each channel are
7738          (displayed using nearest-neighbor interpolation):
7739
7740          ![Red lens shading map](android.statistics.lensShadingMap/red_shading.png)
7741          ![Green (even rows) lens shading map](android.statistics.lensShadingMap/green_e_shading.png)
7742          ![Green (odd rows) lens shading map](android.statistics.lensShadingMap/green_o_shading.png)
7743          ![Blue lens shading map](android.statistics.lensShadingMap/blue_shading.png)
7744
7745          As a visualization only, inverting the full-color map to recover an
7746          image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:
7747
7748          ![Image of a uniform white wall (inverse shading map)](android.statistics.lensShadingMap/inv_shading.png)
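
          As an illustration only, the Java camera2 SDK exposes this entry as a
          LensShadingMap object, which can be sampled per channel (helper name
          hypothetical):

              // Returns the red-channel gain at map sample (column, row), where
              // column is in [0, map.getColumnCount() - 1] and
              // row is in [0, map.getRowCount() - 1].
              float redGainAt(TotalCaptureResult result, int column, int row) {
                  LensShadingMap map =
                          result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
                  return map.getGainFactor(RggbChannelVector.RED, column, row);
              }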
7749          </details>
7750        </entry>
7751        <entry name="lensShadingMap" type="float" visibility="ndk_public"
7752               type_notes="2D array of float gain factors per channel to correct lens shading"
7753               container="array" hwlevel="full">
7754          <array>
7755            <size>4</size>
7756            <size>n</size>
7757            <size>m</size>
7758          </array>
7759          <description>The shading map is a low-resolution floating-point map
7760          that lists the coefficients used to correct for vignetting and color shading,
7761          for each Bayer color channel of RAW image data.</description>
7762          <range>Each gain factor is &amp;gt;= 1</range>
7763          <details>
7764          The map provided here is the same map that is used by the camera device to
7765          correct both color shading and vignetting for output non-RAW images.
7766
7767          When there is no lens shading correction applied to RAW
7768          output images (android.sensor.info.lensShadingApplied `==`
7769          false), this map is the complete lens shading correction
7770          map; when there is some lens shading correction applied to
7771          the RAW output image (android.sensor.info.lensShadingApplied
7772          `==` true), this map reports the remaining lens shading
7773          correction map that needs to be applied to get shading
7774          corrected images that match the camera device's output for
7775          non-RAW formats.
7776
7777          For a complete shading correction map, the least shaded
7778          section of the image will have a gain factor of 1; all
7779          other sections will have gains above 1.
7780
7781          When android.colorCorrection.mode = TRANSFORM_MATRIX, the map
7782          will take into account the colorCorrection settings.
7783
7784          The shading map is for the entire active pixel array, and is not
7785          affected by the crop region specified in the request. Each shading map
7786          entry is the value of the shading compensation map over a specific
7787          pixel on the sensor.  Specifically, with a (N x M) resolution shading
7788          map, and an active pixel array size (W x H), shading map entry
7789          (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
7790          pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
7791          The map is assumed to be bilinearly interpolated between the sample points.
7792
7793          The channel order is [R, Geven, Godd, B], where Geven is the green
7794          channel for the even rows of a Bayer pattern, and Godd is the odd rows.
7795          The shading map is stored in a fully interleaved format, and its size
7796          is provided in the camera static metadata by android.lens.info.shadingMapSize.
7797
7798          The shading map will generally have on the order of 30-40 rows and columns,
7799          and will be smaller than 64x64.
7800
7801          As an example, given a very small map defined as:
7802
7803              android.lens.info.shadingMapSize = [ 4, 3 ]
7804              android.statistics.lensShadingMap =
7805              [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
7806                  1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
7807                1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
7808                  1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
7809                1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
7810                  1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
7811
7812          The low-resolution scaling map images for each channel are
7813          (displayed using nearest-neighbor interpolation):
7814
7815          ![Red lens shading map](android.statistics.lensShadingMap/red_shading.png)
7816          ![Green (even rows) lens shading map](android.statistics.lensShadingMap/green_e_shading.png)
7817          ![Green (odd rows) lens shading map](android.statistics.lensShadingMap/green_o_shading.png)
7818          ![Blue lens shading map](android.statistics.lensShadingMap/blue_shading.png)
7819
7820          As a visualization only, inverting the full-color map to recover an
7821          image of a gray wall (using bicubic interpolation for visual quality)
7822          as captured by the sensor gives:
7823
7824          ![Image of a uniform white wall (inverse shading map)](android.statistics.lensShadingMap/inv_shading.png)
7825
          Note that the RAW image data might be subject to lens shading
          correction not reported on this map. Query
          android.sensor.info.lensShadingApplied to see whether RAW image data has been subject
          to lens shading correction. If android.sensor.info.lensShadingApplied
          is TRUE, the RAW image data is subject to partial or full lens shading
          correction. If full lens shading correction is applied to RAW
          images, the gain factor map reported in this key will contain all 1.0 gains.
          In other words, the map reported in this key is the remaining lens shading
          correction that needs to be applied to the RAW image to get images without lens shading
          artifacts. See android.request.maxNumOutputRaw for a list of RAW image
          formats.
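
          As a sketch only (assuming the map has been read into a flat float array in the
          fully interleaved layout described above, with `columns` samples per row; names
          are hypothetical), a single gain can be indexed as follows, using channel
          indices 0-3 for [R, Geven, Godd, B]:

              // Four interleaved gains are stored per (column, row) sample point.
              float gainAt(float[] shadingMap, int columns,
                      int channel, int column, int row) {
                  return shadingMap[4 * (row * columns + column) + channel];
              }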
7837          </details>
7838          <hal_details>
7839          The lens shading map calculation may depend on exposure and white balance statistics.
7840          When AE and AWB are in AUTO modes
7841          (android.control.aeMode `!=` OFF and android.control.awbMode `!=` OFF), the HAL
          may have all the information it needs to generate the most accurate lens shading map. When
7843          AE or AWB are in manual mode
7844          (android.control.aeMode `==` OFF or android.control.awbMode `==` OFF), the shading map
7845          may be adversely impacted by manual exposure or white balance parameters. To avoid
7846          generating unreliable shading map data, the HAL may choose to lock the shading map with
7847          the latest known good map generated when the AE and AWB are in AUTO modes.
7848          </hal_details>
7849        </entry>
7850        <entry name="predictedColorGains" type="float"
7851               visibility="hidden"
7852               deprecated="true"
7853               optional="true"
7854               type_notes="A 1D array of floats for 4 color channel gains"
7855               container="array">
7856          <array>
7857            <size>4</size>
7858          </array>
7859          <description>The best-fit color channel gains calculated
7860          by the camera device's statistics units for the current output frame.
7861          </description>
7862          <details>
7863          This may be different than the gains used for this frame,
7864          since statistics processing on data from a new frame
7865          typically completes after the transform has already been
7866          applied to that frame.
7867
7868          The 4 channel gains are defined in Bayer domain,
7869          see android.colorCorrection.gains for details.
7870
7871          This value should always be calculated by the auto-white balance (AWB) block,
7872          regardless of the android.control.* current values.
7873          </details>
7874        </entry>
7875        <entry name="predictedColorTransform" type="rational"
7876               visibility="hidden"
7877               deprecated="true"
7878               optional="true"
7879               type_notes="3x3 rational matrix in row-major order"
7880               container="array">
7881          <array>
7882            <size>3</size>
7883            <size>3</size>
7884          </array>
7885          <description>The best-fit color transform matrix estimate
7886          calculated by the camera device's statistics units for the current
7887          output frame.</description>
7888          <details>The camera device will provide the estimate from its
7889          statistics unit on the white balance transforms to use
7890          for the next frame. These are the values the camera device believes
7891          are the best fit for the current output frame. This may
7892          be different than the transform used for this frame, since
7893          statistics processing on data from a new frame typically
7894          completes after the transform has already been applied to
7895          that frame.
7896
7897          These estimates must be provided for all frames, even if
7898          capture settings and color transforms are set by the application.
7899
7900          This value should always be calculated by the auto-white balance (AWB) block,
7901          regardless of the android.control.* current values.
7902          </details>
7903        </entry>
7904        <entry name="sceneFlicker" type="byte" visibility="public" enum="true"
7905               hwlevel="full">
7906          <enum>
7907            <value>NONE
7908            <notes>The camera device does not detect any flickering illumination
7909            in the current scene.</notes></value>
7910            <value>50HZ
7911            <notes>The camera device detects illumination flickering at 50Hz
7912            in the current scene.</notes></value>
7913            <value>60HZ
7914            <notes>The camera device detects illumination flickering at 60Hz
7915            in the current scene.</notes></value>
7916          </enum>
7917          <description>The camera device estimated scene illumination lighting
7918          frequency.</description>
7919          <details>
7920          Many light sources, such as most fluorescent lights, flicker at a rate
7921          that depends on the local utility power standards. This flicker must be
7922          accounted for by auto-exposure routines to avoid artifacts in captured images.
7923          The camera device uses this entry to tell the application what the scene
7924          illuminant frequency is.
7925
7926          When manual exposure control is enabled
7927          (`android.control.aeMode == OFF` or `android.control.mode ==
7928          OFF`), the android.control.aeAntibandingMode doesn't perform
7929          antibanding, and the application can ensure it selects
7930          exposure times that do not cause banding issues by looking
7931          into this metadata field. See
7932          android.control.aeAntibandingMode for more details.
7933
7934          Reports NONE if there doesn't appear to be flickering illumination.
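
          As a sketch only (Java camera2 SDK; helper name hypothetical), an application doing
          manual exposure could round its exposure time to a multiple of the detected flicker
          period, since illumination intensity varies at twice the mains frequency:

              // Returns an exposure time (ns) rounded down to a whole number of flicker
              // periods; passes the value through if no flicker is detected.
              long antibandedExposureNs(TotalCaptureResult result, long desiredNs) {
                  Integer flicker = result.get(CaptureResult.STATISTICS_SCENE_FLICKER);
                  if (flicker == null
                          || flicker == CameraMetadata.STATISTICS_SCENE_FLICKER_NONE) {
                      return desiredNs;
                  }
                  long periodNs = (flicker == CameraMetadata.STATISTICS_SCENE_FLICKER_50HZ)
                          ? 10000000L   // 1/100 s
                          : 8333333L;   // ~1/120 s
                  long periods = Math.max(1, desiredNs / periodNs);
                  return periods * periodNs;
              }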
7935          </details>
7936        </entry>
7937        <clone entry="android.statistics.hotPixelMapMode" kind="controls">
7938        </clone>
7939        <entry name="hotPixelMap" type="int32" visibility="public"
7940        type_notes="list of coordinates based on android.sensor.pixelArraySize"
7941        container="array" typedef="point">
7942          <array>
7943            <size>2</size>
7944            <size>n</size>
7945          </array>
7946          <description>
7947          List of `(x, y)` coordinates of hot/defective pixels on the sensor.
7948          </description>
7949          <range>
7950          n &lt;= number of pixels on the sensor.
7951          The `(x, y)` coordinates must be bounded by
7952          android.sensor.info.pixelArraySize.
7953          </range>
7954          <details>
7955          A coordinate `(x, y)` must lie between `(0, 0)`, and
7956          `(width - 1, height - 1)` (inclusive), which are the top-left and
7957          bottom-right of the pixel array, respectively. The width and
7958          height dimensions are given in android.sensor.info.pixelArraySize.
7959          This may include hot pixels that lie outside of the active array
7960          bounds given by android.sensor.info.activeArraySize.
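
          For example (Java camera2 SDK; illustration only, helper name hypothetical), the
          reported coordinates can be read from a capture result as an array of points:

              // Each Point is an (x, y) coordinate within the pixel array.
              void logHotPixels(TotalCaptureResult result) {
                  Point[] hotPixels = result.get(CaptureResult.STATISTICS_HOT_PIXEL_MAP);
                  if (hotPixels == null) return;  // key not reported for this capture
                  for (Point p : hotPixels) {
                      Log.d("HotPixelMap", "hot pixel at (" + p.x + ", " + p.y + ")");
                  }
              }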
7961          </details>
7962          <hal_details>
          A hot pixel map contains the coordinates of pixels on the camera
          sensor that do not report valid values (usually due to defects in
          the camera sensor). This includes pixels that are stuck at certain
          values, or have a response that does not accurately encode the
          incoming light from the scene.
7968
7969          To avoid performance issues, there should be significantly fewer hot
7970          pixels than actual pixels on the camera sensor.
7971          </hal_details>
7972          <tag id="V1" />
7973          <tag id="RAW" />
7974        </entry>
7975      </dynamic>
7976      <controls>
7977        <entry name="lensShadingMapMode" type="byte" visibility="public" enum="true" hwlevel="full">
7978          <enum>
7979            <value>OFF
7980            <notes>Do not include a lens shading map in the capture result.</notes></value>
7981            <value>ON
7982            <notes>Include a lens shading map in the capture result.</notes></value>
7983          </enum>
7984          <description>Whether the camera device will output the lens
7985          shading map in output result metadata.</description>
7986          <range>android.statistics.info.availableLensShadingMapModes</range>
7987          <details>When set to ON,
7988          android.statistics.lensShadingMap will be provided in
7989          the output result metadata.
7990
7991          ON is always supported on devices with the RAW capability.
7992          </details>
7993          <tag id="RAW" />
7994        </entry>
7995      </controls>
7996      <dynamic>
7997        <clone entry="android.statistics.lensShadingMapMode" kind="controls">
7998        </clone>
7999      </dynamic>
8000    </section>
8001    <section name="tonemap">
8002      <controls>
8003        <entry name="curveBlue" type="float" visibility="ndk_public"
8004        type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
8005        container="array" hwlevel="full">
8006          <array>
8007            <size>n</size>
8008            <size>2</size>
8009          </array>
8010          <description>Tonemapping / contrast / gamma curve for the blue
8011          channel, to use when android.tonemap.mode is
8012          CONTRAST_CURVE.</description>
8013          <details>See android.tonemap.curveRed for more details.</details>
8014        </entry>
8015        <entry name="curveGreen" type="float" visibility="ndk_public"
8016        type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
8017        container="array" hwlevel="full">
8018          <array>
8019            <size>n</size>
8020            <size>2</size>
8021          </array>
8022          <description>Tonemapping / contrast / gamma curve for the green
8023          channel, to use when android.tonemap.mode is
8024          CONTRAST_CURVE.</description>
8025          <details>See android.tonemap.curveRed for more details.</details>
8026        </entry>
8027        <entry name="curveRed" type="float" visibility="ndk_public"
8028        type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
8029        container="array" hwlevel="full">
8030          <array>
8031            <size>n</size>
8032            <size>2</size>
8033          </array>
8034          <description>Tonemapping / contrast / gamma curve for the red
8035          channel, to use when android.tonemap.mode is
8036          CONTRAST_CURVE.</description>
8037          <range>0-1 on both input and output coordinates, normalized
8038          as a floating-point value such that 0 == black and 1 == white.
8039          </range>
8040          <details>
8041          Each channel's curve is defined by an array of control points:
8042
8043              android.tonemap.curveRed =
8044                [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ]
8045              2 &lt;= N &lt;= android.tonemap.maxCurvePoints
8046
8047          These are sorted in order of increasing `Pin`; it is
8048          required that input values 0.0 and 1.0 are included in the list to
8049          define a complete mapping. For input values between control points,
8050          the camera device must linearly interpolate between the control
8051          points.
8052
8053          Each curve can have an independent number of points, and the number
8054          of points can be less than max (that is, the request doesn't have to
8055          always provide a curve with number of points equivalent to
8056          android.tonemap.maxCurvePoints).
8057
8058          A few examples, and their corresponding graphical mappings; these
8059          only specify the red channel and the precision is limited to 4
8060          digits, for conciseness.
8061
8062          Linear mapping:
8063
8064              android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ]
8065
8066          ![Linear mapping curve](android.tonemap.curveRed/linear_tonemap.png)
8067
8068          Invert mapping:
8069
8070              android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ]
8071
8072          ![Inverting mapping curve](android.tonemap.curveRed/inverse_tonemap.png)
8073
8074          Gamma 1/2.2 mapping, with 16 control points:
8075
8076              android.tonemap.curveRed = [
8077                0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
8078                0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072,
8079                0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
8080                0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
8081
8082          ![Gamma = 1/2.2 tonemapping curve](android.tonemap.curveRed/gamma_tonemap.png)
8083
8084          Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:
8085
8086              android.tonemap.curveRed = [
8087                0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
8088                0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130,
8089                0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
8090                0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
8091
8092          ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
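
          As a sketch only (helper name hypothetical), a gamma curve like the examples above
          can be generated as interleaved `(Pin, Pout)` pairs with a chosen number of control
          points:

              // points must be &gt;= 2 and &lt;= android.tonemap.maxCurvePoints.
              float[] gammaCurve(int points, float gamma) {
                  float[] curve = new float[points * 2];
                  for (int i = 0; i &lt; points; i++) {
                      float in = i / (float) (points - 1);
                      curve[2 * i] = in;                                     // Pin
                      curve[2 * i + 1] = (float) Math.pow(in, 1.0 / gamma);  // Pout
                  }
                  return curve;
              }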
8093        </details>
8094        <hal_details>
8095          For good quality of mapping, at least 128 control points are
8096          preferred.
8097
8098          A typical use case of this would be a gamma-1/2.2 curve, with as many
8099          control points used as are available.
8100        </hal_details>
8101        </entry>
8102        <entry name="curve" type="float" visibility="java_public" synthetic="true"
8103               typedef="tonemapCurve"
8104               hwlevel="full">
8105          <description>Tonemapping / contrast / gamma curve to use when android.tonemap.mode
8106          is CONTRAST_CURVE.</description>
8107          <details>
          The tonemapCurve consists of three curves, one each for the red, green, and blue
          channels. The following example uses the red channel; the same logic applies to
          the green and blue channels.
8111          Each channel's curve is defined by an array of control points:
8112
8113              curveRed =
8114                [ P0(in, out), P1(in, out), P2(in, out), P3(in, out), ..., PN(in, out) ]
8115              2 &lt;= N &lt;= android.tonemap.maxCurvePoints
8116
8117          These are sorted in order of increasing `Pin`; it is always
8118          guaranteed that input values 0.0 and 1.0 are included in the list to
8119          define a complete mapping. For input values between control points,
8120          the camera device must linearly interpolate between the control
8121          points.
8122
8123          Each curve can have an independent number of points, and the number
8124          of points can be less than max (that is, the request doesn't have to
8125          always provide a curve with number of points equivalent to
8126          android.tonemap.maxCurvePoints).
8127
8128          A few examples, and their corresponding graphical mappings; these
8129          only specify the red channel and the precision is limited to 4
8130          digits, for conciseness.
8131
8132          Linear mapping:
8133
8134              curveRed = [ (0, 0), (1.0, 1.0) ]
8135
8136          ![Linear mapping curve](android.tonemap.curveRed/linear_tonemap.png)
8137
8138          Invert mapping:
8139
8140              curveRed = [ (0, 1.0), (1.0, 0) ]
8141
8142          ![Inverting mapping curve](android.tonemap.curveRed/inverse_tonemap.png)
8143
8144          Gamma 1/2.2 mapping, with 16 control points:
8145
8146              curveRed = [
8147                (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812),
8148                (0.2667, 0.5484), (0.3333, 0.6069), (0.4000, 0.6594), (0.4667, 0.7072),
8149                (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685),
8150                (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ]
8151
8152          ![Gamma = 1/2.2 tonemapping curve](android.tonemap.curveRed/gamma_tonemap.png)
8153
8154          Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:
8155
8156              curveRed = [
8157                (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845),
8158                (0.2667, 0.5532), (0.3333, 0.6125), (0.4000, 0.6652), (0.4667, 0.7130),
8159                (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721),
8160                (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ]
8161
8162          ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
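
          For example (Java camera2 SDK; illustration only, helper name hypothetical), an
          application-defined linear curve can be applied to all three channels as follows:

              // Request the identity (linear) tonemap on all channels.
              void setLinearTonemap(CaptureRequest.Builder builder) {
                  float[] linear = { 0.0f, 0.0f, 1.0f, 1.0f };  // (Pin, Pout) pairs
                  TonemapCurve curve = new TonemapCurve(linear, linear, linear);
                  builder.set(CaptureRequest.TONEMAP_MODE,
                          CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE);
                  builder.set(CaptureRequest.TONEMAP_CURVE, curve);
              }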
8163        </details>
8164        <hal_details>
8165            This entry is created by the framework from the curveRed, curveGreen and
8166            curveBlue entries.
8167        </hal_details>
8168        </entry>
8169        <entry name="mode" type="byte" visibility="public" enum="true"
8170               hwlevel="full">
8171          <enum>
8172            <value>CONTRAST_CURVE
8173              <notes>Use the tone mapping curve specified in
8174              the android.tonemap.curve* entries.
8175
8176              All color enhancement and tonemapping must be disabled, except
8177              for applying the tonemapping curve specified by
8178              android.tonemap.curve.
8179
8180              Must not slow down frame rate relative to raw
8181              sensor output.
8182              </notes>
8183            </value>
8184            <value>FAST
8185              <notes>
8186              Advanced gamma mapping and color enhancement may be applied, without
8187              reducing frame rate compared to raw sensor output.
8188              </notes>
8189            </value>
8190            <value>HIGH_QUALITY
8191              <notes>
8192              High-quality gamma mapping and color enhancement will be applied, at
8193              the cost of possibly reduced frame rate compared to raw sensor output.
8194              </notes>
8195            </value>
8196            <value>GAMMA_VALUE
8197              <notes>
              Use the gamma value specified in android.tonemap.gamma to perform
8199              tonemapping.
8200
8201              All color enhancement and tonemapping must be disabled, except
8202              for applying the tonemapping curve specified by android.tonemap.gamma.
8203
8204              Must not slow down frame rate relative to raw sensor output.
8205              </notes>
8206            </value>
8207            <value>PRESET_CURVE
8208              <notes>
8209              Use the preset tonemapping curve specified in
              android.tonemap.presetCurve to perform tonemapping.
8211
8212              All color enhancement and tonemapping must be disabled, except
8213              for applying the tonemapping curve specified by
8214              android.tonemap.presetCurve.
8215
8216              Must not slow down frame rate relative to raw sensor output.
8217              </notes>
8218            </value>
8219          </enum>
8220          <description>High-level global contrast/gamma/tonemapping control.
8221          </description>
8222          <range>android.tonemap.availableToneMapModes</range>
8223          <details>
8224          When switching to an application-defined contrast curve by setting
8225          android.tonemap.mode to CONTRAST_CURVE, the curve is defined
8226          per-channel with a set of `(in, out)` points that specify the
8227          mapping from input high-bit-depth pixel value to the output
8228          low-bit-depth value.  Since the actual pixel ranges of both input
8229          and output may change depending on the camera pipeline, the values
8230          are specified by normalized floating-point numbers.
8231
8232          More-complex color mapping operations such as 3D color look-up
8233          tables, selective chroma enhancement, or other non-linear color
8234          transforms will be disabled when android.tonemap.mode is
8235          CONTRAST_CURVE.
8236
8237          When using either FAST or HIGH_QUALITY, the camera device will
8238          emit its own tonemap curve in android.tonemap.curve.
8239          These values are always available, and as close as possible to the
8240          actually used nonlinear/nonglobal transforms.
8241
8242          If a request is sent with CONTRAST_CURVE with the camera device's
8243          provided curve in FAST or HIGH_QUALITY, the image's tonemap will be
8244          roughly the same.</details>
8245        </entry>
8246      </controls>
8247      <static>
8248        <entry name="maxCurvePoints" type="int32" visibility="public"
8249               hwlevel="full">
8250          <description>Maximum number of supported points in the
8251            tonemap curve that can be used for android.tonemap.curve.
8252          </description>
8253          <details>
8254          If the actual number of points provided by the application (in android.tonemap.curve*) is
8255          less than this maximum, the camera device will resample the curve to its internal
8256          representation, using linear interpolation.
8257
8258          The output curves in the result metadata may have a different number
8259          of points than the input curves, and will represent the actual
8260          hardware curves used as closely as possible when linearly interpolated.
8261          </details>
8262          <hal_details>
          This value must be at least 64, and should be at least 128.
8264          </hal_details>
8265        </entry>
8266        <entry name="availableToneMapModes" type="byte" visibility="public"
8267        type_notes="list of enums" container="array" typedef="enumList" hwlevel="full">
8268          <array>
8269            <size>n</size>
8270          </array>
8271          <description>
8272          List of tonemapping modes for android.tonemap.mode that are supported by this camera
8273          device.
8274          </description>
8275          <range>Any value listed in android.tonemap.mode</range>
8276          <details>
8277          Camera devices that support the MANUAL_POST_PROCESSING capability will always contain
          at least one of the mode combinations below:
8279
8280          * CONTRAST_CURVE, FAST and HIGH_QUALITY
8281          * GAMMA_VALUE, PRESET_CURVE, FAST and HIGH_QUALITY
8282
8283          This includes all FULL level devices.
8284          </details>
8285          <hal_details>
8286            HAL must support both FAST and HIGH_QUALITY if automatic tonemap control is available
8287            on the camera device, but the underlying implementation can be the same for both modes.
8288            That is, if the highest quality implementation on the camera device does not slow down
8289            capture rate, then FAST and HIGH_QUALITY will generate the same output.
8290          </hal_details>
8291        </entry>
8292      </static>
8293      <dynamic>
8294        <clone entry="android.tonemap.curveBlue" kind="controls">
8295        </clone>
8296        <clone entry="android.tonemap.curveGreen" kind="controls">
8297        </clone>
8298        <clone entry="android.tonemap.curveRed" kind="controls">
8299        </clone>
8300        <clone entry="android.tonemap.curve" kind="controls">
8301        </clone>
8302        <clone entry="android.tonemap.mode" kind="controls">
8303        </clone>
8304      </dynamic>
8305      <controls>
8306        <entry name="gamma" type="float" visibility="public">
          <description>Tonemapping curve to use when android.tonemap.mode is
          GAMMA_VALUE.
8309          </description>
8310          <details>
          The tonemap curve will be defined by the following formula:
          * OUT = pow(IN, 1.0 / gamma)
          where IN and OUT are the input and output pixel values scaled to the range
          [0.0, 1.0], pow is the power function, and gamma is the gamma value specified
          by this key.
8316
8317          The same curve will be applied to all color channels. The camera device
8318          may clip the input gamma value to its supported range. The actual applied
          value will be returned in the capture result.
8320
          The valid range of the gamma value varies between devices, but values
          within [1.0, 5.0] are guaranteed not to be clipped.
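
          For example (Java camera2 SDK; illustration only, helper name hypothetical):

              // Request gamma-based tonemapping; the device may clip the value, and the
              // actually applied gamma is reported back in the capture result.
              void setGammaTonemap(CaptureRequest.Builder builder, float gamma) {
                  builder.set(CaptureRequest.TONEMAP_MODE,
                          CameraMetadata.TONEMAP_MODE_GAMMA_VALUE);
                  builder.set(CaptureRequest.TONEMAP_GAMMA, gamma);
              }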
8323          </details>
8324        </entry>
8325        <entry name="presetCurve" type="byte" visibility="public" enum="true">
8326          <enum>
8327            <value>SRGB
8328              <notes>Tonemapping curve is defined by sRGB</notes>
8329            </value>
8330            <value>REC709
8331              <notes>Tonemapping curve is defined by ITU-R BT.709</notes>
8332            </value>
8333          </enum>
          <description>Tonemapping curve to use when android.tonemap.mode is
          PRESET_CURVE.
8336          </description>
8337          <details>
          The tonemap curve will be defined by the specified standard.
8339
8340          sRGB (approximated by 16 control points):
8341
8342          ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
8343
8344          Rec. 709 (approximated by 16 control points):
8345
8346          ![Rec. 709 tonemapping curve](android.tonemap.curveRed/rec709_tonemap.png)
8347
          Note that the figures above show 16-control-point approximations of the preset
          curves. Camera devices may apply a different approximation to the curve.
8350          </details>
8351        </entry>
8352      </controls>
8353      <dynamic>
8354        <clone entry="android.tonemap.gamma" kind="controls">
8355        </clone>
8356        <clone entry="android.tonemap.presetCurve" kind="controls">
8357        </clone>
8358      </dynamic>
8359    </section>
8360    <section name="led">
8361      <controls>
8362        <entry name="transmit" type="byte" visibility="hidden" optional="true"
8363               enum="true" typedef="boolean">
8364          <enum>
8365            <value>OFF</value>
8366            <value>ON</value>
8367          </enum>
8368          <description>This LED is nominally used to indicate to the user
8369          that the camera is powered on and may be streaming images back to the
8370          Application Processor. In certain rare circumstances, the OS may
8371          disable this when video is processed locally and not transmitted to
8372          any untrusted applications.
8373
8374          In particular, the LED *must* always be on when the data could be
8375          transmitted off the device. The LED *should* always be on whenever
8376          data is stored locally on the device.
8377
8378          The LED *may* be off if a trusted application is using the data that
8379          doesn't violate the above rules.
8380          </description>
8381        </entry>
8382      </controls>
8383      <dynamic>
8384        <clone entry="android.led.transmit" kind="controls"></clone>
8385      </dynamic>
8386      <static>
8387        <entry name="availableLeds" type="byte" visibility="hidden" optional="true"
8388               enum="true"
8389               container="array">
8390          <array>
8391            <size>n</size>
8392          </array>
8393          <enum>
8394            <value>TRANSMIT
8395              <notes>android.led.transmit control is used.</notes>
8396            </value>
8397          </enum>
8398          <description>A list of camera LEDs that are available on this system.
8399          </description>
8400        </entry>
8401      </static>
8402    </section>
8403    <section name="info">
8404      <static>
8405        <entry name="supportedHardwareLevel" type="byte" visibility="public"
8406               enum="true" hwlevel="legacy">
8407          <enum>
8408            <value>
8409              LIMITED
8410              <notes>
8411              This camera device does not have enough capabilities to qualify as a `FULL` device or
8412              better.
8413
8414              Only the stream configurations listed in the `LEGACY` and `LIMITED` tables in the
8415              {@link ACameraDevice_createCaptureSession} documentation are guaranteed to be supported.
8416
8417              All `LIMITED` devices support the `BACKWARDS_COMPATIBLE` capability, indicating basic
8418              support for color image capture. The only exception is that the device may
8419              alternatively support only the `DEPTH_OUTPUT` capability, if it can only output depth
8420              measurements and not color images.
8421
8422              `LIMITED` devices and above require the use of android.control.aePrecaptureTrigger
8423              to lock exposure metering (and calculate flash power, for cameras with flash) before
8424              capturing a high-quality still image.
8425
8426              A `LIMITED` device that only lists the `BACKWARDS_COMPATIBLE` capability is only
8427              required to support full-automatic operation and post-processing (`OFF` is not
8428              supported for android.control.aeMode, android.control.afMode, or
              android.control.awbMode).
8430
8431              Additional capabilities may optionally be supported by a `LIMITED`-level device, and
8432              can be checked for in android.request.availableCapabilities.
8433              </notes>
8434            </value>
8435            <value>
8436              FULL
8437              <notes>
8438              This camera device is capable of supporting advanced imaging applications.
8439
8440              The stream configurations listed in the `FULL`, `LEGACY` and `LIMITED` tables in the
8441              {@link ACameraDevice_createCaptureSession} documentation are guaranteed to be supported.
8442
              A `FULL` device will support the following capabilities:
8444
8445              * `BURST_CAPTURE` capability (android.request.availableCapabilities contains
8446                `BURST_CAPTURE`)
8447              * Per frame control (android.sync.maxLatency `==` PER_FRAME_CONTROL)
8448              * Manual sensor control (android.request.availableCapabilities contains `MANUAL_SENSOR`)
8449              * Manual post-processing control (android.request.availableCapabilities contains
8450                `MANUAL_POST_PROCESSING`)
8451              * The required exposure time range defined in android.sensor.info.exposureTimeRange
8452              * The required maxFrameDuration defined in android.sensor.info.maxFrameDuration
8453
8454              Note:
              Pre-API level 23, FULL devices also supported arbitrary cropping regions
              (android.scaler.croppingType `==` FREEFORM); this requirement was relaxed in API level
8457              23, and `FULL` devices may only support `CENTERED` cropping.
8458              </notes>
8459            </value>
8460            <value>
8461              LEGACY
8462              <notes>
8463              This camera device is running in backward compatibility mode.
8464
8465              Only the stream configurations listed in the `LEGACY` table in the {@link
8466              ACameraDevice_createCaptureSession} documentation are supported.
8467
              A `LEGACY` device does not support per-frame control, manual sensor control, manual
              post-processing, or arbitrary cropping regions, and it has relaxed performance
              constraints.
8470              No additional capabilities beyond `BACKWARD_COMPATIBLE` will ever be listed by a
8471              `LEGACY` device in android.request.availableCapabilities.
8472
8473              In addition, the android.control.aePrecaptureTrigger is not functional on `LEGACY`
8474              devices. Instead, every request that includes a JPEG-format output target is treated
8475              as triggering a still capture, internally executing a precapture trigger.  This may
8476              fire the flash for flash power metering during precapture, and then fire the flash
8477              for the final capture, if a flash is available on the device and the AE mode is set to
8478              enable the flash.
8479              </notes>
8480            </value>
8481            <value>
8482              3
8483              <notes>
8484              This camera device is capable of YUV reprocessing and RAW data capture, in addition to
8485              FULL-level capabilities.
8486
8487              The stream configurations listed in the `LEVEL_3`, `RAW`, `FULL`, `LEGACY` and
8488              `LIMITED` tables in the {@link
8489              ACameraDevice_createCaptureSession}
8490              documentation are guaranteed to be supported.
8491
8492              The following additional capabilities are guaranteed to be supported:
8493
8494              * `YUV_REPROCESSING` capability (android.request.availableCapabilities contains
8495                `YUV_REPROCESSING`)
8496              * `RAW` capability (android.request.availableCapabilities contains
8497                `RAW`)
8498              </notes>
8499            </value>
8500          </enum>
8501          <description>
          Generally classifies the overall set of the camera device's functionality.
8503          </description>
8504          <details>
8505          The supported hardware level is a high-level description of the camera device's
8506          capabilities, summarizing several capabilities into one field.  Each level adds additional
8507          features to the previous one, and is always a strict superset of the previous level.
8508          The ordering is `LEGACY &lt; LIMITED &lt; FULL &lt; LEVEL_3`.
8509
8510          Starting from `LEVEL_3`, the level enumerations are guaranteed to be in increasing
8511          numerical value as well. To check if a given device is at least at a given hardware level,
8512          the following code snippet can be used:
8513
8514              // Returns true if the device supports the required hardware level, or better.
8515              boolean isHardwareLevelSupported(CameraCharacteristics c, int requiredLevel) {
8516                  int deviceLevel = c.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
8517                  if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
8518                      return requiredLevel == deviceLevel;
8519                  }
8520                  // deviceLevel is not LEGACY, can use numerical sort
8521                  return requiredLevel &lt;= deviceLevel;
8522              }
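
          For example, an application that needs manual sensor control could gate that code
          path on the device reporting at least `FULL` (a minimal sketch built on the helper
          above; the `cameraManager` and `cameraId` values are assumed to exist, and exception
          handling is omitted):

              CameraCharacteristics characteristics =
                      cameraManager.getCameraCharacteristics(cameraId);
              if (isHardwareLevelSupported(characteristics,
                      CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL)) {
                  // MANUAL_SENSOR is guaranteed at FULL, so requests may safely set
                  // SENSOR_EXPOSURE_TIME and SENSOR_SENSITIVITY.
              }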
8523
8524          At a high level, the levels are:
8525
8526          * `LEGACY` devices operate in a backwards-compatibility mode for older
8527            Android devices, and have very limited capabilities.
8528          * `LIMITED` devices represent the
8529            baseline feature set, and may also include additional capabilities that are
8530            subsets of `FULL`.
8531          * `FULL` devices additionally support per-frame manual control of sensor, flash, lens and
8532            post-processing settings, and image capture at a high rate.
8533          * `LEVEL_3` devices additionally support YUV reprocessing and RAW image capture, along
8534            with additional output stream configurations.
8535
8536          See the individual level enums for full descriptions of the supported capabilities.  The
8537          android.request.availableCapabilities entry describes the device's capabilities at a
          finer-grained level, if needed. In addition, many controls have their available settings or
8539          ranges defined in individual metadata tag entries in this document.
8540
8541          Some features are not part of any particular hardware level or capability and must be
8542          queried separately. These include:
8543
8544          * Calibrated timestamps (android.sensor.info.timestampSource `==` REALTIME)
8545          * Precision lens control (android.lens.info.focusDistanceCalibration `==` CALIBRATED)
8546          * Face detection (android.statistics.info.availableFaceDetectModes)
8547          * Optical or electrical image stabilization
8548            (android.lens.info.availableOpticalStabilization,
8549             android.control.availableVideoStabilizationModes)
8550
8551          </details>
8552          <hal_details>
          The camera 3 HAL device can implement one of three possible operational modes: LIMITED,
          FULL, and LEVEL_3.
8555
8556          FULL support or better is expected from new higher-end devices. Limited
8557          mode has hardware requirements roughly in line with those for a camera HAL device v1
8558          implementation, and is expected from older or inexpensive devices. Each level is a strict
8559          superset of the previous level, and they share the same essential operational flow.
8560
8561          For full details refer to "S3. Operational Modes" in camera3.h
8562
8563          Camera HAL3+ must not implement LEGACY mode. It is there for backwards compatibility in
8564          the `android.hardware.camera2` user-facing API only on HALv1 devices, and is implemented
8565          by the camera framework code.
8566          </hal_details>
8567        </entry>
8568      </static>
8569    </section>
8570    <section name="blackLevel">
8571      <controls>
8572        <entry name="lock" type="byte" visibility="public" enum="true"
8573               typedef="boolean" hwlevel="full">
8574          <enum>
8575            <value>OFF</value>
8576            <value>ON</value>
8577          </enum>
8578          <description> Whether black-level compensation is locked
8579          to its current values, or is free to vary.</description>
8580          <details>When set to `true` (ON), the values used for black-level
8581          compensation will not change until the lock is set to
8582          `false` (OFF).
8583
8584          Since changes to certain capture parameters (such as
8585          exposure time) may require resetting of black level
8586          compensation, the camera device must report whether setting
8587          the black level lock was successful in the output result
8588          metadata.
8589
8590          For example, if a sequence of requests is as follows:
8591
8592          * Request 1: Exposure = 10ms, Black level lock = OFF
8593          * Request 2: Exposure = 10ms, Black level lock = ON
8594          * Request 3: Exposure = 10ms, Black level lock = ON
8595          * Request 4: Exposure = 20ms, Black level lock = ON
8596          * Request 5: Exposure = 20ms, Black level lock = ON
8597          * Request 6: Exposure = 20ms, Black level lock = ON
8598
8599          And the exposure change in Request 4 requires the camera
8600          device to reset the black level offsets, then the output
8601          result metadata is expected to be:
8602
8603          * Result 1: Exposure = 10ms, Black level lock = OFF
8604          * Result 2: Exposure = 10ms, Black level lock = ON
8605          * Result 3: Exposure = 10ms, Black level lock = ON
8606          * Result 4: Exposure = 20ms, Black level lock = OFF
8607          * Result 5: Exposure = 20ms, Black level lock = ON
8608          * Result 6: Exposure = 20ms, Black level lock = ON
8609
8610          This indicates to the application that on frame 4, black
8611          levels were reset due to exposure value changes, and pixel
8612          values may not be consistent across captures.
8613
8614          The camera device will maintain the lock to the extent
8615          possible, only overriding the lock to OFF when changes to
8616          other request parameters require a black level recalculation
8617          or reset.
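
          As a minimal illustrative sketch (assuming an existing `CaptureRequest.Builder`
          named `builder` and a `TotalCaptureResult` named `result`), an application might
          engage the lock and verify it in the output metadata as follows:

              // Request that black level compensation stay fixed for subsequent captures.
              builder.set(CaptureRequest.BLACK_LEVEL_LOCK, true);

              // Later, when a capture completes, check whether the lock actually held for
              // that frame; the device reports OFF when a reset was unavoidable.
              Boolean locked = result.get(CaptureResult.BLACK_LEVEL_LOCK);
              if (!Boolean.TRUE.equals(locked)) {
                  // Black levels were recalculated; pixel values may differ from prior frames.
              }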
8618          </details>
8619          <hal_details>
8620          If for some reason black level locking is no longer possible
8621          (for example, the analog gain has changed, which forces
8622          black level offsets to be recalculated), then the HAL must
8623          override this request (and it must report 'OFF' when this
8624          does happen) until the next capture for which locking is
8625          possible again.</hal_details>
8626          <tag id="HAL2" />
8627        </entry>
8628      </controls>
8629      <dynamic>
8630        <clone entry="android.blackLevel.lock"
8631          kind="controls">
8632          <details>
8633            Whether the black level offset was locked for this frame.  Should be
8634            ON if android.blackLevel.lock was ON in the capture request, unless
8635            a change in other capture settings forced the camera device to
8636            perform a black level reset.
8637          </details>
8638        </clone>
8639      </dynamic>
8640    </section>
8641    <section name="sync">
8642      <dynamic>
8643        <entry name="frameNumber" type="int64" visibility="ndk_public"
8644               enum="true" hwlevel="legacy">
8645          <enum>
8646            <value id="-1">CONVERGING
8647              <notes>
8648              The current result is not yet fully synchronized to any request.
8649
8650              Synchronization is in progress, and reading metadata from this
8651              result may include a mix of data that have taken effect since the
8652              last synchronization time.
8653
8654              In some future result, within android.sync.maxLatency frames,
              this value will update to the actual frame number that
8656              the result is guaranteed to be synchronized to (as long as the
8657              request settings remain constant).
8658            </notes>
8659            </value>
8660            <value id="-2">UNKNOWN
8661              <notes>
8662              The current result's synchronization status is unknown.
8663
8664              The result may have already converged, or it may be in
8665              progress.  Reading from this result may include some mix
8666              of settings from past requests.
8667
8668              After a settings change, the new settings will eventually all
8669              take effect for the output buffers and results. However, this
              value will not change when that happens. Altering settings
              rapidly may result in frames that use a mix of settings from recent
              requests.
8673
8674              This value is intended primarily for backwards compatibility with
8675              the older camera implementations (for android.hardware.Camera).
8676            </notes>
8677            </value>
8678          </enum>
8679          <description>The frame number corresponding to the last request
8680          with which the output result (metadata + buffers) has been fully
8681          synchronized.</description>
8682          <range>Either a non-negative value corresponding to a
8683          `frame_number`, or one of the two enums (CONVERGING / UNKNOWN).
8684          </range>
8685          <details>
8686          When a request is submitted to the camera device, there is usually a
8687          delay of several frames before the controls get applied. A camera
8688          device may either choose to account for this delay by implementing a
8689          pipeline and carefully submit well-timed atomic control updates, or
8690          it may start streaming control changes that span over several frame
8691          boundaries.
8692
8693          In the latter case, whenever a request's settings change relative to
8694          the previous submitted request, the full set of changes may take
8695          multiple frame durations to fully take effect. Some settings may
8696          take effect sooner (in less frame durations) than others.
8697
8698          While a set of control changes are being propagated, this value
8699          will be CONVERGING.
8700
8701          Once it is fully known that a set of control changes have been
8702          finished propagating, and the resulting updated control settings
8703          have been read back by the camera device, this value will be set
8704          to a non-negative frame number (corresponding to the request to
          which the results have synchronized).
8706
8707          Older camera device implementations may not have a way to detect
8708          when all camera controls have been applied, and will always set this
8709          value to UNKNOWN.
8710
8711          FULL capability devices will always have this value set to the
8712          frame number of the request corresponding to this result.
8713
8714          _Further details_:
8715
8716          * Whenever a request differs from the last request, any future
8717          results not yet returned may have this value set to CONVERGING (this
8718          could include any in-progress captures not yet returned by the camera
8719          device, for more details see pipeline considerations below).
8720          * Submitting a series of multiple requests that differ from the
8721          previous request (e.g. r1, r2, r3 s.t. r1 != r2 != r3)
8722          moves the new synchronization frame to the last non-repeating
8723          request (using the smallest frame number from the contiguous list of
8724          repeating requests).
8725          * Submitting the same request repeatedly will not change this value
8726          to CONVERGING, if it was already a non-negative value.
8727          * When this value changes to non-negative, that means that all of the
8728          metadata controls from the request have been applied, all of the
8729          metadata controls from the camera device have been read to the
8730          updated values (into the result), and all of the graphics buffers
8731          corresponding to this result are also synchronized to the request.
8732
8733          _Pipeline considerations_:
8734
8735          Submitting a request with updated controls relative to the previously
8736          submitted requests may also invalidate the synchronization state
8737          of all the results corresponding to currently in-flight requests.
8738
8739          In other words, results for this current request and up to
8740          android.request.pipelineMaxDepth prior requests may have their
8741          android.sync.frameNumber change to CONVERGING.
8742          </details>
8743          <hal_details>
8744          Using UNKNOWN here is illegal unless android.sync.maxLatency
8745          is also UNKNOWN.
8746
8747          FULL capability devices should simply set this value to the
8748          `frame_number` of the request this result corresponds to.
8749          </hal_details>
8750          <tag id="V1" />
8751        </entry>
8752      </dynamic>
8753      <static>
8754        <entry name="maxLatency" type="int32" visibility="public" enum="true"
8755               hwlevel="legacy">
8756          <enum>
8757            <value id="0">PER_FRAME_CONTROL
8758              <notes>
8759              Every frame has the requests immediately applied.
8760
8761              Changing controls over multiple requests one after another will
8762              produce results that have those controls applied atomically
8763              each frame.
8764
8765              All FULL capability devices will have this as their maxLatency.
8766              </notes>
8767            </value>
8768            <value id="-1">UNKNOWN
8769              <notes>
8770              Each new frame has some subset (potentially the entire set)
8771              of the past requests applied to the camera settings.
8772
8773              By submitting a series of identical requests, the camera device
8774              will eventually have the camera settings applied, but it is
8775              unknown when that exact point will be.
8776
8777              All LEGACY capability devices will have this as their maxLatency.
8778              </notes>
8779            </value>
8780          </enum>
8781          <description>
8782          The maximum number of frames that can occur after a request
          (different from the previous) has been submitted, and before the
8784          result's state becomes synchronized.
8785          </description>
8786          <units>Frame counts</units>
8787          <range>A positive value, PER_FRAME_CONTROL, or UNKNOWN.</range>
8788          <details>
8789          This defines the maximum distance (in number of metadata results),
8790          between the frame number of the request that has new controls to apply
8791          and the frame number of the result that has all the controls applied.
8792
          In other words, this acts as an upper bound on how many frames
          must occur before the camera device knows for certain that the newly
          submitted camera settings have been applied to outgoing frames.
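
          On the application side, a minimal sketch of interpreting this key might look like
          the following (the `characteristics` object is assumed to be available):

              Integer maxLatency = characteristics.get(CameraCharacteristics.SYNC_MAX_LATENCY);
              if (maxLatency == null
                      || maxLatency == CameraCharacteristics.SYNC_MAX_LATENCY_UNKNOWN) {
                  // No guarantee; submit identical requests and allow an unspecified number
                  // of frames for the settings to settle.
              } else if (maxLatency == CameraCharacteristics.SYNC_MAX_LATENCY_PER_FRAME_CONTROL) {
                  // Every request's settings are applied to exactly that request's frame.
              } else {
                  // New settings are guaranteed to be applied within maxLatency frames.
              }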
8796          </details>
8797          <hal_details>
          For example, if maxLatency were 2,
8799
8800              initial request = X (repeating)
8801              request1 = X
8802              request2 = Y
8803              request3 = Y
8804              request4 = Y
8805
8806              where requestN has frameNumber N, and the first of the repeating
              initial requests has frameNumber F (and F &lt; 1).
8808
8809              initial result = X' + { android.sync.frameNumber == F }
8810              result1 = X' + { android.sync.frameNumber == F }
8811              result2 = X' + { android.sync.frameNumber == CONVERGING }
8812              result3 = X' + { android.sync.frameNumber == CONVERGING }
8813              result4 = X' + { android.sync.frameNumber == 2 }
8814
8815              where resultN has frameNumber N.
8816
8817          Since `result4` has a `frameNumber == 4` and
8818          `android.sync.frameNumber == 2`, the distance is clearly
8819          `4 - 2 = 2`.
8820
8821          Use `frame_count` from camera3_request_t instead of
8822          android.request.frameCount or
8823          `{@link android.hardware.camera2.CaptureResult#getFrameNumber}`.
8824
8825          LIMITED devices are strongly encouraged to use a non-negative
8826          value. If UNKNOWN is used here then app developers do not have a way
8827          to know when sensor settings have been applied.
8828          </hal_details>
8829          <tag id="V1" />
8830        </entry>
8831      </static>
8832    </section>
8833    <section name="reprocess">
8834      <controls>
8835        <entry name="effectiveExposureFactor" type="float" visibility="java_public" hwlevel="limited">
8836            <description>
            The exposure time increase factor applied to the original output
            frame by the application's processing before sending it for reprocessing.
8839            </description>
8840            <units>Relative exposure time increase factor.</units>
8841            <range> &amp;gt;= 1.0</range>
8842            <details>
8843            This is optional, and will be supported if the camera device supports YUV_REPROCESSING
8844            capability (android.request.availableCapabilities contains YUV_REPROCESSING).
8845
8846            For some YUV reprocessing use cases, the application may choose to filter the original
8847            output frames to effectively reduce the noise to the same level as a frame that was
8848            captured with longer exposure time. To be more specific, assuming the original captured
8849            images were captured with a sensitivity of S and an exposure time of T, the model in
8850            the camera device is that the amount of noise in the image would be approximately what
8851            would be expected if the original capture parameters had been a sensitivity of
8852            S/effectiveExposureFactor and an exposure time of T*effectiveExposureFactor, rather
8853            than S and T respectively. If the captured images were processed by the application
8854            before being sent for reprocessing, then the application may have used image processing
8855            algorithms and/or multi-frame image fusion to reduce the noise in the
8856            application-processed images (input images). By using the effectiveExposureFactor
8857            control, the application can communicate to the camera device the actual noise level
8858            improvement in the application-processed image. With this information, the camera
8859            device can select appropriate noise reduction and edge enhancement parameters to avoid
8860            excessive noise reduction (android.noiseReduction.mode) and insufficient edge
8861            enhancement (android.edge.mode) being applied to the reprocessed frames.
8862
            For example, in a multi-frame image fusion use case, the application may fuse
            multiple output frames together into a final frame for reprocessing. When N images are
            fused into 1 image for reprocessing, the exposure time increase factor could be up to
            the square root of N (based on a simple photon shot noise model). The camera device
            will adjust the reprocessing noise reduction and edge enhancement parameters
            accordingly to produce the best quality images.
8869
            This is a relative factor; 1.0 indicates that the application hasn't processed the
            input buffer in a way that affects its effective exposure time.
8872
            This control is only effective for YUV reprocessing capture requests. For noise
8874            reduction reprocessing, it is only effective when `android.noiseReduction.mode != OFF`.
8875            Similarly, for edge enhancement reprocessing, it is only effective when
8876            `android.edge.mode != OFF`.
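
            As a rough sketch of the fusion case described above (the `fusedFrameCount`,
            `device`, and `totalResult` names are illustrative, and exception handling is
            omitted):

                // Assume the application fused this many captured frames into one input image.
                int fusedFrameCount = 4;
                float exposureFactor = (float) Math.sqrt(fusedFrameCount);

                CaptureRequest.Builder reprocessBuilder =
                        device.createReprocessCaptureRequest(totalResult);
                reprocessBuilder.set(
                        CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, exposureFactor);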
8877            </details>
8878          <tag id="REPROC" />
8879        </entry>
8880      </controls>
8881      <dynamic>
8882      <clone entry="android.reprocess.effectiveExposureFactor" kind="controls">
8883      </clone>
8884      </dynamic>
8885      <static>
8886        <entry name="maxCaptureStall" type="int32" visibility="java_public" hwlevel="limited">
8887          <description>
          The maximal camera capture pipeline stall (in units of frames) introduced by a
          reprocess capture request.
8890          </description>
8891          <units>Number of frames.</units>
8892          <range> &amp;lt;= 4</range>
8893          <details>
          This key describes the maximal interference that one reprocess (input) request
          can introduce to the camera device's simultaneous streaming of regular (output) capture
          requests, including repeating requests.
8897
          When a reprocessing capture request is submitted while a camera output repeating request
          (e.g. preview) is being served by the camera device, it may preempt the camera capture
          pipeline for at least one frame duration so that the camera device is unable to process
          the following capture request in time for the next sensor start-of-exposure boundary.
          When this happens, the application may observe a capture time gap (longer than one frame
          duration) between adjacent capture output frames, which usually appears as a preview
          glitch if the repeating request output targets include a preview surface. This key gives
          the worst-case number of frames of stall introduced by one reprocess request with any
          format/size combination.
8907
          If this key reports 0, it means a reprocess request doesn't introduce any glitch to the
          ongoing camera repeating request outputs, as if the reprocess request had never been
          issued.
8910
8911          This key is supported if the camera device supports PRIVATE or YUV reprocessing (
8912          i.e. android.request.availableCapabilities contains PRIVATE_REPROCESSING or
8913          YUV_REPROCESSING).
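
          A minimal sketch of reading this key to budget for preview interruptions (the
          `characteristics` object is assumed to be available):

              Integer maxStall =
                      characteristics.get(CameraCharacteristics.REPROCESS_MAX_CAPTURE_STALL);
              if (maxStall != null) {
                  // Expect up to maxStall skipped preview frames around each reprocess
                  // request; a value of 0 means reprocessing never disturbs the repeating
                  // request outputs.
              }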
8914          </details>
8915          <tag id="REPROC" />
8916        </entry>
8917      </static>
8918    </section>
8919    <section name="depth">
8920      <static>
8921        <entry name="maxDepthSamples" type="int32" visibility="system" hwlevel="limited">
8922          <description>Maximum number of points that a depth point cloud may contain.
8923          </description>
8924          <details>
8925            If a camera device supports outputting depth range data in the form of a depth point
8926            cloud ({@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD}), this is the maximum
8927            number of points an output buffer may contain.
8928
8929            Any given buffer may contain between 0 and maxDepthSamples points, inclusive.
8930            If output in the depth point cloud format is not supported, this entry will
8931            not be defined.
8932          </details>
8933          <tag id="DEPTH" />
8934        </entry>
8935        <entry name="availableDepthStreamConfigurations" type="int32" visibility="ndk_public"
8936               enum="true" container="array" typedef="streamConfiguration" hwlevel="limited">
8937          <array>
8938            <size>n</size>
8939            <size>4</size>
8940          </array>
8941          <enum>
8942            <value>OUTPUT</value>
8943            <value>INPUT</value>
8944          </enum>
8945          <description>The available depth dataspace stream
8946          configurations that this camera device supports
8947          (i.e. format, width, height, output/input stream).
8948          </description>
8949          <details>
8950            These are output stream configurations for use with
8951            dataSpace HAL_DATASPACE_DEPTH. The configurations are
8952            listed as `(format, width, height, input?)` tuples.
8953
8954            Only devices that support depth output for at least
8955            the HAL_PIXEL_FORMAT_Y16 dense depth map may include
8956            this entry.
8957
8958            A device that also supports the HAL_PIXEL_FORMAT_BLOB
8959            sparse depth point cloud must report a single entry for
8960            the format in this list as `(HAL_PIXEL_FORMAT_BLOB,
8961            android.depth.maxDepthSamples, 1, OUTPUT)` in addition to
8962            the entries for HAL_PIXEL_FORMAT_Y16.
8963          </details>
8964          <tag id="DEPTH" />
8965        </entry>
8966        <entry name="availableDepthMinFrameDurations" type="int64" visibility="ndk_public"
8967               container="array" typedef="streamConfigurationDuration" hwlevel="limited">
8968          <array>
8969            <size>4</size>
8970            <size>n</size>
8971          </array>
8972          <description>This lists the minimum frame duration for each
8973          format/size combination for depth output formats.
8974          </description>
8975          <units>(format, width, height, ns) x n</units>
8976          <details>
8977          This should correspond to the frame duration when only that
8978          stream is active, with all processing (typically in android.*.mode)
8979          set to either OFF or FAST.
8980
8981          When multiple streams are used in a request, the minimum frame
8982          duration will be max(individual stream min durations).
8983
8984          The minimum frame duration of a stream (of a particular format, size)
8985          is the same regardless of whether the stream is input or output.
8986
8987          See android.sensor.frameDuration and
8988          android.scaler.availableStallDurations for more details about
8989          calculating the max frame rate.
8990          </details>
8991          <tag id="DEPTH" />
8992        </entry>
8993        <entry name="availableDepthStallDurations" type="int64" visibility="ndk_public"
8994               container="array" typedef="streamConfigurationDuration" hwlevel="limited">
8995          <array>
8996            <size>4</size>
8997            <size>n</size>
8998          </array>
8999          <description>This lists the maximum stall duration for each
9000          output format/size combination for depth streams.
9001          </description>
9002          <units>(format, width, height, ns) x n</units>
9003          <details>
9004          A stall duration is how much extra time would get added
9005          to the normal minimum frame duration for a repeating request
9006          that has streams with non-zero stall.
9007
9008          This functions similarly to
9009          android.scaler.availableStallDurations for depth
9010          streams.
9011
9012          All depth output stream formats may have a nonzero stall
9013          duration.
9014          </details>
9015          <tag id="DEPTH" />
9016        </entry>
9017        <entry name="depthIsExclusive" type="byte" visibility="public"
9018               enum="true" typedef="boolean" hwlevel="limited">
9019          <enum>
9020            <value>FALSE</value>
9021            <value>TRUE</value>
9022          </enum>
9023          <description>Indicates whether a capture request may target both a
9024          DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as
9025          YUV_420_888, JPEG, or RAW) simultaneously.
9026          </description>
9027          <details>
9028          If TRUE, including both depth and color outputs in a single
9029          capture request is not supported. An application must interleave color
9030          and depth requests.  If FALSE, a single request can target both types
9031          of output.
9032
9033          Typically, this restriction exists on camera devices that
9034          need to emit a specific pattern or wavelength of light to
9035          measure depth values, which causes the color image to be
9036          corrupted during depth measurement.
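
          A minimal sketch of how an application might branch on this key (the
          `characteristics` object and the output targets are assumed to exist):

              Boolean depthExclusive =
                      characteristics.get(CameraCharacteristics.DEPTH_DEPTH_IS_EXCLUSIVE);
              if (Boolean.TRUE.equals(depthExclusive)) {
                  // Alternate requests: one targeting only the DEPTH16 reader, the next
                  // targeting only the color (e.g. YUV_420_888 or JPEG) outputs.
              } else {
                  // A single request may target both the depth and color outputs.
              }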
9037          </details>
9038        </entry>
9039      </static>
9040    </section>
9041  </namespace>
9042</metadata>
9043