1 /*
2  * Copyright (C) 2012 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2;
18 
19 import android.annotation.NonNull;
20 import android.annotation.Nullable;
21 import android.hardware.camera2.impl.CameraMetadataNative;
22 import android.hardware.camera2.impl.CaptureResultExtras;
23 import android.hardware.camera2.impl.PublicKey;
24 import android.hardware.camera2.impl.SyntheticKey;
25 import android.hardware.camera2.utils.TypeReference;
26 import android.util.Log;
27 import android.util.Rational;
28 
29 import java.util.List;
30 
31 /**
32  * <p>The subset of the results of a single image capture from the image sensor.</p>
33  *
34  * <p>Contains a subset of the final configuration for the capture hardware (sensor, lens,
35  * flash), the processing pipeline, the control algorithms, and the output
36  * buffers.</p>
37  *
38  * <p>CaptureResults are produced by a {@link CameraDevice} after processing a
39  * {@link CaptureRequest}. All properties listed for capture requests can also
40  * be queried on the capture result, to determine the final values used for
41  * capture. The result also includes additional metadata about the state of the
42  * camera device during the capture.</p>
43  *
44  * <p>Not all properties returned by {@link CameraCharacteristics#getAvailableCaptureResultKeys()}
45  * are necessarily available. Some results are {@link CaptureResult partial} and will
46  * not have every key set. Only {@link TotalCaptureResult total} results are guaranteed to have
47  * every key available that was enabled by the request.</p>
48  *
49  * <p>{@link CaptureResult} objects are immutable.</p>
50  *
51  */
52 public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
53 
54     private static final String TAG = "CaptureResult";
55     private static final boolean VERBOSE = false;
56 
57     /**
58      * A {@code Key} is used to do capture result field lookups with
59      * {@link CaptureResult#get}.
60      *
61      * <p>For example, to get the timestamp corresponding to the exposure of the first row:
62      * <code><pre>
63      * long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);
64      * </pre></code>
65      * </p>
66      *
67      * <p>To enumerate over all possible keys for {@link CaptureResult}, see
68      * {@link CameraCharacteristics#getAvailableCaptureResultKeys}.</p>
69      *
70      * @see CaptureResult#get
71      * @see CameraCharacteristics#getAvailableCaptureResultKeys
72      */
73     public final static class Key<T> {
74         private final CameraMetadataNative.Key<T> mKey;
75 
76         /**
77          * Visible for testing and vendor extensions only.
78          *
79          * @hide
80          */
81         public Key(String name, Class<T> type, long vendorId) {
82             mKey = new CameraMetadataNative.Key<T>(name, type, vendorId);
83         }
84 
85         /**
86          * Visible for testing and vendor extensions only.
87          *
88          * @hide
89          */
90         public Key(String name, Class<T> type) {
91             mKey = new CameraMetadataNative.Key<T>(name, type);
92         }
93 
94         /**
95          * Visible for testing and vendor extensions only.
96          *
97          * @hide
98          */
99         public Key(String name, TypeReference<T> typeReference) {
100             mKey = new CameraMetadataNative.Key<T>(name, typeReference);
101         }
102 
103         /**
104          * Return a camelCase, period separated name formatted like:
105          * {@code "root.section[.subsections].name"}.
106          *
107          * <p>Built-in keys exposed by the Android SDK are always prefixed with {@code "android."};
108          * keys that are device/platform-specific are prefixed with {@code "com."}.</p>
109          *
110          * <p>For example, {@code CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP} would
111          * have a name of {@code "android.scaler.streamConfigurationMap"}; whereas a device
112          * specific key might look like {@code "com.google.nexus.data.private"}.</p>
113          *
114          * @return String representation of the key name
115          */
116         @NonNull
117         public String getName() {
118             return mKey.getName();
119         }
120 
121         /**
122          * Return vendor tag id.
123          *
124          * @hide
125          */
126         public long getVendorId() {
127             return mKey.getVendorId();
128         }
129 
130         /**
131          * {@inheritDoc}
132          */
133         @Override
134         public final int hashCode() {
135             return mKey.hashCode();
136         }
137 
138         /**
139          * {@inheritDoc}
140          */
141         @SuppressWarnings("unchecked")
142         @Override
143         public final boolean equals(Object o) {
144             return o instanceof Key && ((Key<T>)o).mKey.equals(mKey);
145         }
146 
147         /**
148          * Return this {@link Key} as a string representation.
149          *
150          * <p>{@code "CaptureResult.Key(%s)"}, where {@code %s} represents
151          * the name of this key as returned by {@link #getName}.</p>
152          *
153          * @return string representation of {@link Key}
154          */
155         @NonNull
156         @Override
157         public String toString() {
158             return String.format("CaptureResult.Key(%s)", mKey.getName());
159         }
160 
161         /**
162          * Visible for CameraMetadataNative implementation only; do not use.
163          *
164          * TODO: Make this private or remove it altogether.
165          *
166          * @hide
167          */
168         public CameraMetadataNative.Key<T> getNativeKey() {
169             return mKey;
170         }
171 
172         @SuppressWarnings({ "unchecked" })
173         /*package*/ Key(CameraMetadataNative.Key<?> nativeKey) {
174             mKey = (CameraMetadataNative.Key<T>) nativeKey;
175         }
176     }
177 
178     private final CameraMetadataNative mResults;
179     private final CaptureRequest mRequest;
180     private final int mSequenceId;
181     private final long mFrameNumber;
182 
183     /**
184      * Takes ownership of the passed-in properties object
185      *
186      * <p>For internal use only</p>
187      * @hide
188      */
189     public CaptureResult(CameraMetadataNative results, CaptureRequest parent,
190             CaptureResultExtras extras) {
191         if (results == null) {
192             throw new IllegalArgumentException("results was null");
193         }
194 
195         if (parent == null) {
196             throw new IllegalArgumentException("parent was null");
197         }
198 
199         if (extras == null) {
200             throw new IllegalArgumentException("extras was null");
201         }
202 
203         mResults = CameraMetadataNative.move(results);
204         if (mResults.isEmpty()) {
205             throw new AssertionError("Results must not be empty");
206         }
207         setNativeInstance(mResults);
208         mRequest = parent;
209         mSequenceId = extras.getRequestId();
210         mFrameNumber = extras.getFrameNumber();
211     }
212 
213     /**
214      * Returns a copy of the underlying {@link CameraMetadataNative}.
215      * @hide
216      */
217     public CameraMetadataNative getNativeCopy() {
218         return new CameraMetadataNative(mResults);
219     }
220 
221     /**
222      * Creates a request-less result.
223      *
224      * <p><strong>For testing only.</strong></p>
225      * @hide
226      */
227     public CaptureResult(CameraMetadataNative results, int sequenceId) {
228         if (results == null) {
229             throw new IllegalArgumentException("results was null");
230         }
231 
232         mResults = CameraMetadataNative.move(results);
233         if (mResults.isEmpty()) {
234             throw new AssertionError("Results must not be empty");
235         }
236 
237         setNativeInstance(mResults);
238         mRequest = null;
239         mSequenceId = sequenceId;
240         mFrameNumber = -1;
241     }
242 
243     /**
244      * Get a capture result field value.
245      *
246      * <p>The field definitions can be found in {@link CaptureResult}.</p>
247      *
248      * <p>Querying the value for the same key more than once will return a value
249      * which is equal to the previously queried value.</p>
250      *
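     * <p>For example, a minimal sketch of reading the exposure time from a
     * received result (here {@code result} is an application-side placeholder
     * for a {@link CaptureResult} delivered to a capture callback; the value
     * may be {@code null} for {@link CaptureResult partial} results):</p>
     * <code><pre>
     * // 'result' is assumed to come from an application capture callback.
     * Long exposureTimeNs = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
     * if (exposureTimeNs != null) {
     *     // Use the exposure time; it is reported in nanoseconds.
     * }
     * </pre></code>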
251      * @throws IllegalArgumentException if the key was not valid
252      *
253      * @param key The result field to read.
254      * @return The value of that key, or {@code null} if the field is not set.
255      */
256     @Nullable
257     public <T> T get(Key<T> key) {
258         T value = mResults.get(key);
259         if (VERBOSE) Log.v(TAG, "#get for Key = " + key.getName() + ", returned value = " + value);
260         return value;
261     }
262 
263     /**
264      * {@inheritDoc}
265      * @hide
266      */
267     @SuppressWarnings("unchecked")
268     @Override
269     protected <T> T getProtected(Key<?> key) {
270         return (T) mResults.get(key);
271     }
272 
273     /**
274      * {@inheritDoc}
275      * @hide
276      */
277     @SuppressWarnings("unchecked")
278     @Override
279     protected Class<Key<?>> getKeyClass() {
280         Object thisClass = Key.class;
281         return (Class<Key<?>>)thisClass;
282     }
283 
284     /**
285      * Dumps the native metadata contents to logcat.
286      *
287      * <p>Visibility for testing/debugging only. The results will not
288      * include any synthesized keys, as they are invisible to the native layer.</p>
289      *
290      * @hide
291      */
292     public void dumpToLog() {
293         mResults.dumpToLog();
294     }
295 
296     /**
297      * {@inheritDoc}
298      */
299     @Override
300     @NonNull
301     public List<Key<?>> getKeys() {
302         // Force the javadoc for this function to show up on the CaptureResult page
303         return super.getKeys();
304     }
305 
306     /**
307      * Get the request associated with this result.
308      *
309      * <p>Whenever a request has been fully or partially captured, with
310      * {@link CameraCaptureSession.CaptureCallback#onCaptureCompleted} or
311      * {@link CameraCaptureSession.CaptureCallback#onCaptureProgressed}, the {@code result}'s
312      * {@code getRequest()} will return that {@code request}.
313      * </p>
314      *
315      * <p>For example,
316      * <code><pre>session.capture(someRequest, new CaptureCallback() {
317      *     {@literal @}Override
318      *     void onCaptureCompleted(CameraCaptureSession session, CaptureRequest myRequest, TotalCaptureResult myResult) {
319      *         assert(myResult.getRequest().equals(myRequest) == true);
320      *     }
321      * }, null);
322      * </pre></code>
323      * </p>
324      *
325      * @return The request associated with this result. Never {@code null}.
326      */
327     @NonNull
328     public CaptureRequest getRequest() {
329         return mRequest;
330     }
331 
332     /**
333      * Get the frame number associated with this result.
334      *
335      * <p>Whenever a request has been processed, regardless of failure or success,
336      * it gets a unique frame number assigned to its future result/failure.</p>
337      *
338      * <p>For the same type of request (capturing from the camera device or reprocessing), this
339      * value monotonically increments, starting with 0, for every new result or failure and the
340      * scope is the lifetime of the {@link CameraDevice}. Between different types of requests,
341      * the frame number may not monotonically increment. For example, the frame number of a newer
342      * reprocess result may be smaller than the frame number of an older result of capturing new
343      * images from the camera device, but the frame number of a newer reprocess result will never be
344      * smaller than the frame number of an older reprocess result.</p>
345      *
346      * @return The frame number
347      *
348      * @see CameraDevice#createCaptureRequest
349      * @see CameraDevice#createReprocessCaptureRequest
350      */
351     public long getFrameNumber() {
352         return mFrameNumber;
353     }
354 
355     /**
356      * The sequence ID for this capture result that was returned by the
357      * {@link CameraCaptureSession#capture} family of functions.
358      *
359      * <p>The sequence ID is a unique monotonically increasing value starting from 0,
360      * incremented every time a new group of requests is submitted to the CameraDevice.</p>
361      *
362      * @return int The ID for the sequence of requests that this capture result is a part of
363      *
364      * @see CameraDevice.CaptureCallback#onCaptureSequenceCompleted
365      * @see CameraDevice.CaptureCallback#onCaptureSequenceAborted
366      */
367     public int getSequenceId() {
368         return mSequenceId;
369     }
370 
371     /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
372      * The key entries below this point are generated from metadata
373      * definitions in /system/media/camera/docs. Do not modify by hand or
374      * modify the comment blocks at the start or end.
375      *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/
376 
377     /**
378      * <p>The mode control selects how the image data is converted from the
379      * sensor's native color into linear sRGB color.</p>
380      * <p>When auto-white balance (AWB) is enabled with {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, this
381      * control is overridden by the AWB routine. When AWB is disabled, the
382      * application controls how the color mapping is performed.</p>
383      * <p>We define the expected processing pipeline below. For consistency
384      * across devices, this pipeline is always used when the mode is TRANSFORM_MATRIX.</p>
385      * <p>When either FULL or HIGH_QUALITY is used, the camera device may
386      * do additional processing but {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
387      * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} will still be provided by the
388      * camera device (in the results) and be roughly correct.</p>
389      * <p>Switching to TRANSFORM_MATRIX and using the data provided from
390      * FAST or HIGH_QUALITY will yield a picture with the same white point
391      * as what was produced by the camera device in the earlier frame.</p>
392      * <p>The expected processing pipeline is as follows:</p>
393      * <p><img alt="White balance processing pipeline" src="../../../../images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png" /></p>
394      * <p>The white balance is encoded by two values, a 4-channel white-balance
395      * gain vector (applied in the Bayer domain), and a 3x3 color transform
396      * matrix (applied after demosaic).</p>
397      * <p>The 4-channel white-balance gains are defined as:</p>
398      * <pre><code>{@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} = [ R G_even G_odd B ]
399      * </code></pre>
400      * <p>where <code>G_even</code> is the gain for green pixels on even rows of the
401      * output, and <code>G_odd</code> is the gain for green pixels on the odd rows.
402      * These may be identical for a given camera device implementation; if
403      * the camera device does not support a separate gain for even/odd green
404      * channels, it will use the <code>G_even</code> value, and write <code>G_odd</code> equal to
405      * <code>G_even</code> in the output result metadata.</p>
406      * <p>The matrices for color transforms are defined as a 9-entry vector:</p>
407      * <pre><code>{@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
408      * </code></pre>
409      * <p>which define a transform from input sensor colors, <code>P_in = [ r g b ]</code>,
410      * to output linear sRGB, <code>P_out = [ r' g' b' ]</code>,</p>
411      * <p>with colors as follows:</p>
412      * <pre><code>r' = I0r + I1g + I2b
413      * g' = I3r + I4g + I5b
414      * b' = I6r + I7g + I8b
415      * </code></pre>
416      * <p>Both the input and output value ranges must match. Overflow/underflow
417      * values are clipped to fit within the range.</p>
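     * <p>A minimal sketch of applying an identity transform and unity gains
     * manually (the {@code requestBuilder} name is an application-side
     * placeholder for an already-created {@link CaptureRequest.Builder}, and
     * auto-white balance is assumed to be disabled):</p>
     * <code><pre>
     * // 'requestBuilder' is an assumed, already-created CaptureRequest.Builder.
     * requestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE,
     *         CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
     * requestBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
     *         new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f));
     * // Identity 3x3 transform as numerator/denominator pairs, row-major.
     * requestBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
     *         new ColorSpaceTransform(new int[] {
     *                 1, 1,  0, 1,  0, 1,
     *                 0, 1,  1, 1,  0, 1,
     *                 0, 1,  0, 1,  1, 1 }));
     * </pre></code>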
418      * <p><b>Possible values:</b>
419      * <ul>
420      *   <li>{@link #COLOR_CORRECTION_MODE_TRANSFORM_MATRIX TRANSFORM_MATRIX}</li>
421      *   <li>{@link #COLOR_CORRECTION_MODE_FAST FAST}</li>
422      *   <li>{@link #COLOR_CORRECTION_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
423      * </ul></p>
424      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
425      * <p><b>Full capability</b> -
426      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
427      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
428      *
429      * @see CaptureRequest#COLOR_CORRECTION_GAINS
430      * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
431      * @see CaptureRequest#CONTROL_AWB_MODE
432      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
433      * @see #COLOR_CORRECTION_MODE_TRANSFORM_MATRIX
434      * @see #COLOR_CORRECTION_MODE_FAST
435      * @see #COLOR_CORRECTION_MODE_HIGH_QUALITY
436      */
437     @PublicKey
438     public static final Key<Integer> COLOR_CORRECTION_MODE =
439             new Key<Integer>("android.colorCorrection.mode", int.class);
440 
441     /**
442      * <p>A color transform matrix to use to transform
443      * from sensor RGB color space to output linear sRGB color space.</p>
444      * <p>This matrix is either set by the camera device when the request
445      * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not TRANSFORM_MATRIX, or
446      * directly by the application in the request when the
447      * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is TRANSFORM_MATRIX.</p>
448      * <p>In the latter case, the camera device may round the matrix to account
449      * for precision issues; the final rounded matrix should be reported back
450      * in this matrix result metadata. The transform should keep the magnitude
451      * of the output color values within <code>[0, 1.0]</code> (assuming input color
452      * values are within the normalized range <code>[0, 1.0]</code>), or clipping may occur.</p>
453      * <p>The valid range of each matrix element varies on different devices, but
454      * values within [-1.5, 3.0] are guaranteed not to be clipped.</p>
455      * <p><b>Units</b>: Unitless scale factors</p>
456      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
457      * <p><b>Full capability</b> -
458      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
459      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
460      *
461      * @see CaptureRequest#COLOR_CORRECTION_MODE
462      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
463      */
464     @PublicKey
465     public static final Key<android.hardware.camera2.params.ColorSpaceTransform> COLOR_CORRECTION_TRANSFORM =
466             new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.colorCorrection.transform", android.hardware.camera2.params.ColorSpaceTransform.class);
467 
468     /**
469      * <p>Gains applying to Bayer raw color channels for
470      * white-balance.</p>
471      * <p>These per-channel gains are either set by the camera device
472      * when the request {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not
473      * TRANSFORM_MATRIX, or directly by the application in the
474      * request when the {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is
475      * TRANSFORM_MATRIX.</p>
476      * <p>The gains in the result metadata are the gains actually
477      * applied by the camera device to the current frame.</p>
478      * <p>The valid range of gains varies on different devices, but gains
479      * between [1.0, 3.0] are guaranteed not to be clipped. Even if a given
480      * device allows gains below 1.0, this is usually not recommended because
481      * this can create color artifacts.</p>
482      * <p><b>Units</b>: Unitless gain factors</p>
483      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
484      * <p><b>Full capability</b> -
485      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
486      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
487      *
488      * @see CaptureRequest#COLOR_CORRECTION_MODE
489      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
490      */
491     @PublicKey
492     public static final Key<android.hardware.camera2.params.RggbChannelVector> COLOR_CORRECTION_GAINS =
493             new Key<android.hardware.camera2.params.RggbChannelVector>("android.colorCorrection.gains", android.hardware.camera2.params.RggbChannelVector.class);
494 
495     /**
496      * <p>Mode of operation for the chromatic aberration correction algorithm.</p>
497      * <p>Chromatic (color) aberration is caused by the fact that different wavelengths of light
498      * cannot focus on the same point after exiting from the lens. This metadata defines
499      * the high level control of chromatic aberration correction algorithm, which aims to
500      * minimize the chromatic artifacts that may occur along the object boundaries in an
501      * image.</p>
502      * <p>FAST/HIGH_QUALITY both mean that camera device determined aberration
503      * correction will be applied. HIGH_QUALITY mode indicates that the camera device will
504      * use the highest-quality aberration correction algorithms, even if it slows down
505      * capture rate. FAST means the camera device will not slow down capture rate when
506      * applying aberration correction.</p>
507      * <p>LEGACY devices will always be in FAST mode.</p>
508      * <p><b>Possible values:</b>
509      * <ul>
510      *   <li>{@link #COLOR_CORRECTION_ABERRATION_MODE_OFF OFF}</li>
511      *   <li>{@link #COLOR_CORRECTION_ABERRATION_MODE_FAST FAST}</li>
512      *   <li>{@link #COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
513      * </ul></p>
514      * <p><b>Available values for this device:</b><br>
515      * {@link CameraCharacteristics#COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES android.colorCorrection.availableAberrationModes}</p>
516      * <p>This key is available on all devices.</p>
517      *
518      * @see CameraCharacteristics#COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES
519      * @see #COLOR_CORRECTION_ABERRATION_MODE_OFF
520      * @see #COLOR_CORRECTION_ABERRATION_MODE_FAST
521      * @see #COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY
522      */
523     @PublicKey
524     public static final Key<Integer> COLOR_CORRECTION_ABERRATION_MODE =
525             new Key<Integer>("android.colorCorrection.aberrationMode", int.class);
526 
527     /**
528      * <p>The desired setting for the camera device's auto-exposure
529      * algorithm's antibanding compensation.</p>
530      * <p>Some kinds of lighting fixtures, such as some fluorescent
531      * lights, flicker at the rate of the power supply frequency
532      * (60Hz or 50Hz, depending on country). While this is
533      * typically not noticeable to a person, it can be visible to
534      * a camera device. If a camera sets its exposure time to the
535      * wrong value, the flicker may become visible in the
536      * viewfinder, or in a final captured image as a
537      * set of variable-brightness bands across the image.</p>
538      * <p>Therefore, the auto-exposure routines of camera devices
539      * include antibanding routines that ensure that the chosen
540      * exposure value will not cause such banding. The choice of
541      * exposure time depends on the rate of flicker, which the
542      * camera device can detect automatically, or the expected
543      * rate can be selected by the application using this
544      * control.</p>
545      * <p>A given camera device may not support all of the possible
546      * options for the antibanding mode. The
547      * {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES android.control.aeAvailableAntibandingModes} key contains
548      * the available modes for a given camera device.</p>
549      * <p>AUTO mode is the default if it is available on a given
550      * camera device. When AUTO mode is not available, the
551      * default will be either 50HZ or 60HZ, and both 50HZ
552      * and 60HZ will be available.</p>
553      * <p>If manual exposure control is enabled (by setting
554      * {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} to OFF),
555      * then this setting has no effect, and the application must
556      * ensure it selects exposure times that do not cause banding
557      * issues. The {@link CaptureResult#STATISTICS_SCENE_FLICKER android.statistics.sceneFlicker} key can assist
558      * the application in this.</p>
559      * <p><b>Possible values:</b>
560      * <ul>
561      *   <li>{@link #CONTROL_AE_ANTIBANDING_MODE_OFF OFF}</li>
562      *   <li>{@link #CONTROL_AE_ANTIBANDING_MODE_50HZ 50HZ}</li>
563      *   <li>{@link #CONTROL_AE_ANTIBANDING_MODE_60HZ 60HZ}</li>
564      *   <li>{@link #CONTROL_AE_ANTIBANDING_MODE_AUTO AUTO}</li>
565      * </ul></p>
566      * <p><b>Available values for this device:</b><br></p>
567      * <p>{@link CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES android.control.aeAvailableAntibandingModes}</p>
568      * <p>This key is available on all devices.</p>
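     * <p>A minimal sketch of selecting AUTO when it is advertised (the
     * {@code characteristics} and {@code requestBuilder} names are
     * application-side placeholders):</p>
     * <code><pre>
     * // 'characteristics' and 'requestBuilder' are assumed to already exist.
     * int[] modes = characteristics.get(
     *         CameraCharacteristics.CONTROL_AE_AVAILABLE_ANTIBANDING_MODES);
     * for (int mode : modes) {
     *     if (mode == CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_AUTO) {
     *         requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, mode);
     *     }
     * }
     * </pre></code>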
569      *
570      * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES
571      * @see CaptureRequest#CONTROL_AE_MODE
572      * @see CaptureRequest#CONTROL_MODE
573      * @see CaptureResult#STATISTICS_SCENE_FLICKER
574      * @see #CONTROL_AE_ANTIBANDING_MODE_OFF
575      * @see #CONTROL_AE_ANTIBANDING_MODE_50HZ
576      * @see #CONTROL_AE_ANTIBANDING_MODE_60HZ
577      * @see #CONTROL_AE_ANTIBANDING_MODE_AUTO
578      */
579     @PublicKey
580     public static final Key<Integer> CONTROL_AE_ANTIBANDING_MODE =
581             new Key<Integer>("android.control.aeAntibandingMode", int.class);
582 
583     /**
584      * <p>Adjustment to auto-exposure (AE) target image
585      * brightness.</p>
586      * <p>The adjustment is measured as a count of steps, with the
587      * step size defined by {@link CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP android.control.aeCompensationStep} and the
588      * allowed range by {@link CameraCharacteristics#CONTROL_AE_COMPENSATION_RANGE android.control.aeCompensationRange}.</p>
589      * <p>For example, if the exposure value (EV) step is 0.333, '6'
590      * will mean an exposure compensation of +2 EV; -3 will mean an
591      * exposure compensation of -1 EV. One EV represents a doubling
592      * of image brightness. Note that this control will only be
593      * effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} <code>!=</code> OFF. This control
594      * will take effect even when {@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} <code>== true</code>.</p>
595      * <p>If the exposure compensation value is changed, the camera device
596      * may take several frames to reach the newly requested exposure target.
597      * During that time, the {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} field will be in the SEARCHING
598      * state. Once the new exposure target is reached, {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} will
599      * change from SEARCHING to either CONVERGED, LOCKED (if AE lock is enabled), or
600      * FLASH_REQUIRED (if the scene is too dark for still capture).</p>
601      * <p><b>Units</b>: Compensation steps</p>
602      * <p><b>Range of valid values:</b><br>
603      * {@link CameraCharacteristics#CONTROL_AE_COMPENSATION_RANGE android.control.aeCompensationRange}</p>
604      * <p>This key is available on all devices.</p>
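     * <p>A minimal sketch of requesting roughly +1 EV (the
     * {@code characteristics} and {@code requestBuilder} names are
     * application-side placeholders; the step and range are device-specific):</p>
     * <code><pre>
     * // 'characteristics' and 'requestBuilder' are assumed to already exist.
     * Rational step = characteristics.get(
     *         CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
     * Range&lt;Integer&gt; range = characteristics.get(
     *         CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
     * // Number of steps closest to +1 EV, clamped to the supported range.
     * int steps = range.clamp(Math.round(1.0f / step.floatValue()));
     * requestBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, steps);
     * </pre></code>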
605      *
606      * @see CameraCharacteristics#CONTROL_AE_COMPENSATION_RANGE
607      * @see CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP
608      * @see CaptureRequest#CONTROL_AE_LOCK
609      * @see CaptureRequest#CONTROL_AE_MODE
610      * @see CaptureResult#CONTROL_AE_STATE
611      */
612     @PublicKey
613     public static final Key<Integer> CONTROL_AE_EXPOSURE_COMPENSATION =
614             new Key<Integer>("android.control.aeExposureCompensation", int.class);
615 
616     /**
617      * <p>Whether auto-exposure (AE) is currently locked to its latest
618      * calculated values.</p>
619      * <p>When set to <code>true</code> (ON), the AE algorithm is locked to its latest parameters,
620      * and will not change exposure settings until the lock is set to <code>false</code> (OFF).</p>
621      * <p>Note that even when AE is locked, the flash may be fired if
622      * the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is ON_AUTO_FLASH /
623      * ON_ALWAYS_FLASH / ON_AUTO_FLASH_REDEYE.</p>
624      * <p>When {@link CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION android.control.aeExposureCompensation} is changed, even if the AE lock
625      * is ON, the camera device will still adjust its exposure value.</p>
626      * <p>If AE precapture is triggered (see {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger})
627      * when AE is already locked, the camera device will not change the exposure time
628      * ({@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}) and sensitivity ({@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity})
629      * parameters. The flash may be fired if the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}
630      * is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the
631      * {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is ON_ALWAYS_FLASH, the scene may become overexposed.
632      * Similarly, AE precapture trigger CANCEL has no effect when AE is already locked.</p>
633      * <p>When an AE precapture sequence is triggered, AE unlock will not be able to unlock
634      * the AE if AE is locked by the camera device internally during the precapture metering
635      * sequence. In other words, submitting requests with AE unlock has no effect on an
636      * ongoing precapture metering sequence. Otherwise, the precapture metering sequence
637      * would never succeed in a sequence of preview requests where AE lock is always set
638      * to <code>false</code>.</p>
639      * <p>Since the camera device has a pipeline of in-flight requests, the settings that
640      * get locked do not necessarily correspond to the settings that were present in the
641      * latest capture result received from the camera device, since additional captures
642      * and AE updates may have occurred even before the result was sent out. If an
643      * application is switching between automatic and manual control and wishes to eliminate
644      * any flicker during the switch, the following procedure is recommended:</p>
645      * <ol>
646      * <li>Starting in auto-AE mode:</li>
647      * <li>Lock AE</li>
648      * <li>Wait for the first result to be output that has the AE locked</li>
649      * <li>Copy exposure settings from that result into a request, set the request to manual AE</li>
650      * <li>Submit the capture request, proceed to run manual AE as desired.</li>
651      * </ol>
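     * <p>A minimal sketch of this procedure (the {@code session},
     * {@code previewBuilder}, {@code manualBuilder} and {@code handler} names
     * are application-side placeholders for an already-configured capture
     * session and request builders):</p>
     * <code><pre>
     * // 1-2. Starting in auto-AE mode, lock AE on the repeating preview request.
     * previewBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
     * session.setRepeatingRequest(previewBuilder.build(),
     *         new CameraCaptureSession.CaptureCallback() {
     *     {@literal @}Override
     *     public void onCaptureCompleted(CameraCaptureSession s,
     *             CaptureRequest request, TotalCaptureResult result) {
     *         Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
     *         if (aeState == null) return;
     *         // 3-4. Once AE reports LOCKED, copy its settings into a manual request.
     *         if (aeState == CameraMetadata.CONTROL_AE_STATE_LOCKED) {
     *             manualBuilder.set(CaptureRequest.CONTROL_AE_MODE,
     *                     CameraMetadata.CONTROL_AE_MODE_OFF);
     *             manualBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME,
     *                     result.get(CaptureResult.SENSOR_EXPOSURE_TIME));
     *             manualBuilder.set(CaptureRequest.SENSOR_SENSITIVITY,
     *                     result.get(CaptureResult.SENSOR_SENSITIVITY));
     *             // 5. Submit manualBuilder.build() and continue with manual AE.
     *         }
     *     }
     * }, handler);
     * </pre></code>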
652      * <p>See {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} for AE lock related state transition details.</p>
653      * <p>This key is available on all devices.</p>
654      *
655      * @see CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION
656      * @see CaptureRequest#CONTROL_AE_MODE
657      * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
658      * @see CaptureResult#CONTROL_AE_STATE
659      * @see CaptureRequest#SENSOR_EXPOSURE_TIME
660      * @see CaptureRequest#SENSOR_SENSITIVITY
661      */
662     @PublicKey
663     public static final Key<Boolean> CONTROL_AE_LOCK =
664             new Key<Boolean>("android.control.aeLock", boolean.class);
665 
666     /**
667      * <p>The desired mode for the camera device's
668      * auto-exposure routine.</p>
669      * <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is
670      * AUTO.</p>
671      * <p>When set to any of the ON modes, the camera device's
672      * auto-exposure routine is enabled, overriding the
673      * application's selected exposure time, sensor sensitivity,
674      * and frame duration ({@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime},
675      * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and
676      * {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}). If one of the FLASH modes
677      * is selected, the camera device's flash unit controls are
678      * also overridden.</p>
679      * <p>The FLASH modes are only available if the camera device
680      * has a flash unit ({@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} is <code>true</code>).</p>
681      * <p>If flash TORCH mode is desired, this field must be set to
682      * ON or OFF, and {@link CaptureRequest#FLASH_MODE android.flash.mode} set to TORCH.</p>
683      * <p>When set to any of the ON modes, the values chosen by the
684      * camera device auto-exposure routine for the overridden
685      * fields for a given capture will be available in its
686      * CaptureResult.</p>
687      * <p><b>Possible values:</b>
688      * <ul>
689      *   <li>{@link #CONTROL_AE_MODE_OFF OFF}</li>
690      *   <li>{@link #CONTROL_AE_MODE_ON ON}</li>
691      *   <li>{@link #CONTROL_AE_MODE_ON_AUTO_FLASH ON_AUTO_FLASH}</li>
692      *   <li>{@link #CONTROL_AE_MODE_ON_ALWAYS_FLASH ON_ALWAYS_FLASH}</li>
693      *   <li>{@link #CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE ON_AUTO_FLASH_REDEYE}</li>
694      * </ul></p>
695      * <p><b>Available values for this device:</b><br>
696      * {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_MODES android.control.aeAvailableModes}</p>
697      * <p>This key is available on all devices.</p>
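     * <p>For example, a minimal sketch of enabling the torch while keeping
     * auto-exposure running (the {@code requestBuilder} name is an
     * application-side placeholder, and a flash unit is assumed to be present):</p>
     * <code><pre>
     * // 'requestBuilder' is an assumed, already-created CaptureRequest.Builder.
     * requestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
     *         CameraMetadata.CONTROL_AE_MODE_ON);
     * requestBuilder.set(CaptureRequest.FLASH_MODE,
     *         CameraMetadata.FLASH_MODE_TORCH);
     * </pre></code>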
698      *
699      * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_MODES
700      * @see CaptureRequest#CONTROL_MODE
701      * @see CameraCharacteristics#FLASH_INFO_AVAILABLE
702      * @see CaptureRequest#FLASH_MODE
703      * @see CaptureRequest#SENSOR_EXPOSURE_TIME
704      * @see CaptureRequest#SENSOR_FRAME_DURATION
705      * @see CaptureRequest#SENSOR_SENSITIVITY
706      * @see #CONTROL_AE_MODE_OFF
707      * @see #CONTROL_AE_MODE_ON
708      * @see #CONTROL_AE_MODE_ON_AUTO_FLASH
709      * @see #CONTROL_AE_MODE_ON_ALWAYS_FLASH
710      * @see #CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
711      */
712     @PublicKey
713     public static final Key<Integer> CONTROL_AE_MODE =
714             new Key<Integer>("android.control.aeMode", int.class);
715 
716     /**
717      * <p>List of metering areas to use for auto-exposure adjustment.</p>
718      * <p>Not available if {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AE android.control.maxRegionsAe} is 0.
719      * Otherwise will always be present.</p>
720      * <p>The maximum number of regions supported by the device is determined by the value
721      * of {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AE android.control.maxRegionsAe}.</p>
722      * <p>The coordinate system is based on the active pixel array,
723      * with (0,0) being the top-left pixel in the active pixel array, and
724      * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
725      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
726      * bottom-right pixel in the active pixel array.</p>
727      * <p>The weight must be within <code>[0, 1000]</code>, and represents a weight
728      * for every pixel in the area. This means that a large metering area
729      * with the same weight as a smaller area will have more effect in
730      * the metering result. Metering areas can partially overlap and the
731      * camera device will add the weights in the overlap region.</p>
732      * <p>The weights are relative to weights of other exposure metering regions, so if only one
733      * region is used, all non-zero weights will have the same effect. A region with 0
734      * weight is ignored.</p>
735      * <p>If all regions have 0 weight, then no specific metering area needs to be used by the
736      * camera device.</p>
737      * <p>If the metering region is outside the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in
738      * capture result metadata, the camera device will ignore the sections outside the crop
739      * region and output only the intersection rectangle as the metering region in the result
740      * metadata.  If the region is entirely outside the crop region, it will be ignored and
741      * not reported in the result metadata.</p>
742      * <p><b>Units</b>: Pixel coordinates within {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
743      * <p><b>Range of valid values:</b><br>
744      * Coordinates must be between <code>[(0,0), (width, height))</code> of
745      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
746      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
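     * <p>A minimal sketch of metering on the center quarter of the active
     * array (the {@code characteristics} and {@code requestBuilder} names are
     * application-side placeholders):</p>
     * <code><pre>
     * // 'characteristics' and 'requestBuilder' are assumed to already exist.
     * Rect active = characteristics.get(
     *         CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
     * MeteringRectangle center = new MeteringRectangle(
     *         active.width() / 4, active.height() / 4,
     *         active.width() / 2, active.height() / 2,
     *         MeteringRectangle.METERING_WEIGHT_MAX);
     * requestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS,
     *         new MeteringRectangle[] { center });
     * </pre></code>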
747      *
748      * @see CameraCharacteristics#CONTROL_MAX_REGIONS_AE
749      * @see CaptureRequest#SCALER_CROP_REGION
750      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
751      */
752     @PublicKey
753     public static final Key<android.hardware.camera2.params.MeteringRectangle[]> CONTROL_AE_REGIONS =
754             new Key<android.hardware.camera2.params.MeteringRectangle[]>("android.control.aeRegions", android.hardware.camera2.params.MeteringRectangle[].class);
755 
756     /**
757      * <p>Range over which the auto-exposure routine can
758      * adjust the capture frame rate to maintain good
759      * exposure.</p>
760      * <p>Only constrains auto-exposure (AE) algorithm, not
761      * manual control of {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime} and
762      * {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}.</p>
763      * <p><b>Units</b>: Frames per second (FPS)</p>
764      * <p><b>Range of valid values:</b><br>
765      * Any of the entries in {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES android.control.aeAvailableTargetFpsRanges}</p>
766      * <p>This key is available on all devices.</p>
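     * <p>A minimal sketch of requesting a fixed 30 fps rate, assuming the
     * device advertises a (30, 30) entry in
     * {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES android.control.aeAvailableTargetFpsRanges}
     * (the {@code requestBuilder} name is an application-side placeholder):</p>
     * <code><pre>
     * // Only valid if (30, 30) is one of the advertised target FPS ranges.
     * requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
     *         new Range&lt;Integer&gt;(30, 30));
     * </pre></code>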
767      *
768      * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES
769      * @see CaptureRequest#SENSOR_EXPOSURE_TIME
770      * @see CaptureRequest#SENSOR_FRAME_DURATION
771      */
772     @PublicKey
773     public static final Key<android.util.Range<Integer>> CONTROL_AE_TARGET_FPS_RANGE =
774             new Key<android.util.Range<Integer>>("android.control.aeTargetFpsRange", new TypeReference<android.util.Range<Integer>>() {{ }});
775 
776     /**
777      * <p>Whether the camera device will trigger a precapture
778      * metering sequence when it processes this request.</p>
779      * <p>This entry is normally set to IDLE, or is not
780      * included at all in the request settings. When included and
781      * set to START, the camera device will trigger the auto-exposure (AE)
782      * precapture metering sequence.</p>
783      * <p>When set to CANCEL, the camera device will cancel any active
784      * precapture metering trigger, and return to its initial AE state.
785      * If a precapture metering sequence is already completed, and the camera
786      * device has implicitly locked the AE for subsequent still capture, the
787      * CANCEL trigger will unlock the AE and return to its initial AE state.</p>
788      * <p>The precapture sequence should be triggered before starting a
789      * high-quality still capture for final metering decisions to
790      * be made, and for firing pre-capture flash pulses to estimate
791      * scene brightness and required final capture flash power, when
792      * the flash is enabled.</p>
793      * <p>Normally, this entry should be set to START for only a
794      * single request, and the application should wait until the
795      * sequence completes before starting a new one.</p>
796      * <p>When a precapture metering sequence is finished, the camera device
797      * may lock the auto-exposure routine internally to be able to accurately expose the
798      * subsequent still capture image (<code>{@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} == STILL_CAPTURE</code>).
799      * For this case, the AE may not resume normal scan if no subsequent still capture is
800      * submitted. To ensure that the AE routine restarts normal scan, the application should
801      * submit a request with <code>{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} == true</code>, followed by a request
802      * with <code>{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} == false</code>, if the application decides not to submit a
803      * still capture request after the precapture sequence completes. Alternatively, for
804      * API level 23 or newer devices, the CANCEL can be used to unlock the camera device
805      * internally locked AE if the application doesn't submit a still capture request after
806      * the AE precapture trigger. Note that CANCEL was added in API level 23, and must not
807      * be used on devices with earlier API levels.</p>
808      * <p>The exact effect of auto-exposure (AE) precapture trigger
809      * depends on the current AE mode and state; see
810      * {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} for AE precapture state transition
811      * details.</p>
812      * <p>On LEGACY-level devices, the precapture trigger is not supported;
813      * capturing a high-resolution JPEG image will automatically trigger a
814      * precapture sequence before the high-resolution capture, including
815      * potentially firing a pre-capture flash.</p>
816      * <p>Using the precapture trigger and the auto-focus trigger {@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}
817      * simultaneously is allowed. However, since these triggers often require cooperation between
818      * the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
819      * focus sweep), the camera device may delay acting on a later trigger until the previous
820      * trigger has been fully handled. This may lead to longer intervals between the trigger and
821      * changes to {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} indicating the start of the precapture sequence, for
822      * example.</p>
823      * <p>If both the precapture and the auto-focus trigger are activated on the same request, then
824      * the camera device will complete them in the optimal order for that device.</p>
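     * <p>A minimal sketch of starting a precapture sequence before a still
     * capture (the {@code session}, {@code previewBuilder}, {@code handler}
     * and {@code precaptureCallback} names are application-side placeholders;
     * the callback is expected to watch {@link CaptureResult#CONTROL_AE_STATE android.control.aeState}):</p>
     * <code><pre>
     * // Trigger the AE precapture sequence with a single capture request.
     * previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
     *         CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
     * session.capture(previewBuilder.build(), precaptureCallback, handler);
     * // Reset the trigger so repeating preview requests do not re-trigger it.
     * previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
     *         CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
     * // In precaptureCallback, wait for android.control.aeState to leave
     * // PRECAPTURE before issuing the still capture request.
     * </pre></code>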
825      * <p><b>Possible values:</b>
826      * <ul>
827      *   <li>{@link #CONTROL_AE_PRECAPTURE_TRIGGER_IDLE IDLE}</li>
828      *   <li>{@link #CONTROL_AE_PRECAPTURE_TRIGGER_START START}</li>
829      *   <li>{@link #CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL CANCEL}</li>
830      * </ul></p>
831      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
832      * <p><b>Limited capability</b> -
833      * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
834      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
835      *
836      * @see CaptureRequest#CONTROL_AE_LOCK
837      * @see CaptureResult#CONTROL_AE_STATE
838      * @see CaptureRequest#CONTROL_AF_TRIGGER
839      * @see CaptureRequest#CONTROL_CAPTURE_INTENT
840      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
841      * @see #CONTROL_AE_PRECAPTURE_TRIGGER_IDLE
842      * @see #CONTROL_AE_PRECAPTURE_TRIGGER_START
843      * @see #CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
844      */
845     @PublicKey
846     public static final Key<Integer> CONTROL_AE_PRECAPTURE_TRIGGER =
847             new Key<Integer>("android.control.aePrecaptureTrigger", int.class);
848 
849     /**
850      * <p>Current state of the auto-exposure (AE) algorithm.</p>
851      * <p>Switching between or enabling AE modes ({@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}) always
852      * resets the AE state to INACTIVE. Similarly, switching between {@link CaptureRequest#CONTROL_MODE android.control.mode},
853      * or {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode} if <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE</code> resets all
854      * the algorithm states to INACTIVE.</p>
855      * <p>The camera device can do several state transitions between two results, if it is
856      * allowed by the state transition table. For example: INACTIVE may never actually be
857      * seen in a result.</p>
858      * <p>The state in the result is the state for this image (in sync with this image): if
859      * AE state becomes CONVERGED, then the image data associated with this result should
860      * be good to use.</p>
861      * <p>Below are state transition tables for different AE modes.</p>
862      * <table>
863      * <thead>
864      * <tr>
865      * <th align="center">State</th>
866      * <th align="center">Transition Cause</th>
867      * <th align="center">New State</th>
868      * <th align="center">Notes</th>
869      * </tr>
870      * </thead>
871      * <tbody>
872      * <tr>
873      * <td align="center">INACTIVE</td>
874      * <td align="center"></td>
875      * <td align="center">INACTIVE</td>
876      * <td align="center">Camera device auto exposure algorithm is disabled</td>
877      * </tr>
878      * </tbody>
879      * </table>
880      * <p>When {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is AE_MODE_ON_*:</p>
881      * <table>
882      * <thead>
883      * <tr>
884      * <th align="center">State</th>
885      * <th align="center">Transition Cause</th>
886      * <th align="center">New State</th>
887      * <th align="center">Notes</th>
888      * </tr>
889      * </thead>
890      * <tbody>
891      * <tr>
892      * <td align="center">INACTIVE</td>
893      * <td align="center">Camera device initiates AE scan</td>
894      * <td align="center">SEARCHING</td>
895      * <td align="center">Values changing</td>
896      * </tr>
897      * <tr>
898      * <td align="center">INACTIVE</td>
899      * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON</td>
900      * <td align="center">LOCKED</td>
901      * <td align="center">Values locked</td>
902      * </tr>
903      * <tr>
904      * <td align="center">SEARCHING</td>
905      * <td align="center">Camera device finishes AE scan</td>
906      * <td align="center">CONVERGED</td>
907      * <td align="center">Good values, not changing</td>
908      * </tr>
909      * <tr>
910      * <td align="center">SEARCHING</td>
911      * <td align="center">Camera device finishes AE scan</td>
912      * <td align="center">FLASH_REQUIRED</td>
913      * <td align="center">Converged but too dark w/o flash</td>
914      * </tr>
915      * <tr>
916      * <td align="center">SEARCHING</td>
917      * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON</td>
918      * <td align="center">LOCKED</td>
919      * <td align="center">Values locked</td>
920      * </tr>
921      * <tr>
922      * <td align="center">CONVERGED</td>
923      * <td align="center">Camera device initiates AE scan</td>
924      * <td align="center">SEARCHING</td>
925      * <td align="center">Values changing</td>
926      * </tr>
927      * <tr>
928      * <td align="center">CONVERGED</td>
929      * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON</td>
930      * <td align="center">LOCKED</td>
931      * <td align="center">Values locked</td>
932      * </tr>
933      * <tr>
934      * <td align="center">FLASH_REQUIRED</td>
935      * <td align="center">Camera device initiates AE scan</td>
936      * <td align="center">SEARCHING</td>
937      * <td align="center">Values changing</td>
938      * </tr>
939      * <tr>
940      * <td align="center">FLASH_REQUIRED</td>
941      * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON</td>
942      * <td align="center">LOCKED</td>
943      * <td align="center">Values locked</td>
944      * </tr>
945      * <tr>
946      * <td align="center">LOCKED</td>
947      * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is OFF</td>
948      * <td align="center">SEARCHING</td>
949      * <td align="center">Values not good after unlock</td>
950      * </tr>
951      * <tr>
952      * <td align="center">LOCKED</td>
953      * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is OFF</td>
954      * <td align="center">CONVERGED</td>
955      * <td align="center">Values good after unlock</td>
956      * </tr>
957      * <tr>
958      * <td align="center">LOCKED</td>
959      * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is OFF</td>
960      * <td align="center">FLASH_REQUIRED</td>
961      * <td align="center">Exposure good, but too dark</td>
962      * </tr>
963      * <tr>
964      * <td align="center">PRECAPTURE</td>
965      * <td align="center">Sequence done. {@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is OFF</td>
966      * <td align="center">CONVERGED</td>
967      * <td align="center">Ready for high-quality capture</td>
968      * </tr>
969      * <tr>
970      * <td align="center">PRECAPTURE</td>
971      * <td align="center">Sequence done. {@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON</td>
972      * <td align="center">LOCKED</td>
973      * <td align="center">Ready for high-quality capture</td>
974      * </tr>
975      * <tr>
976      * <td align="center">LOCKED</td>
977      * <td align="center">aeLock is ON and aePrecaptureTrigger is START</td>
978      * <td align="center">LOCKED</td>
979      * <td align="center">Precapture trigger is ignored when AE is already locked</td>
980      * </tr>
981      * <tr>
982      * <td align="center">LOCKED</td>
983      * <td align="center">aeLock is ON and aePrecaptureTrigger is CANCEL</td>
984      * <td align="center">LOCKED</td>
985      * <td align="center">Precapture trigger is ignored when AE is already locked</td>
986      * </tr>
987      * <tr>
988      * <td align="center">Any state (excluding LOCKED)</td>
989      * <td align="center">{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is START</td>
990      * <td align="center">PRECAPTURE</td>
991      * <td align="center">Start AE precapture metering sequence</td>
992      * </tr>
993      * <tr>
994      * <td align="center">Any state (excluding LOCKED)</td>
995      * <td align="center">{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is CANCEL</td>
996      * <td align="center">INACTIVE</td>
997      * <td align="center">Currently active precapture metering sequence is canceled</td>
998      * </tr>
999      * </tbody>
1000      * </table>
1001      * <p>For the above table, the camera device may skip reporting any state changes that happen
1002      * without application intervention (i.e. mode switch, trigger, locking). Any state that
1003      * can be skipped in that manner is called a transient state.</p>
1004      * <p>For example, for the above AE modes (AE_MODE_ON_*), in addition to the state transitions
1005      * listed in the above table, it is also legal for the camera device to skip one or more
1006      * transient states between two results. See the table below for examples:</p>
1007      * <table>
1008      * <thead>
1009      * <tr>
1010      * <th align="center">State</th>
1011      * <th align="center">Transition Cause</th>
1012      * <th align="center">New State</th>
1013      * <th align="center">Notes</th>
1014      * </tr>
1015      * </thead>
1016      * <tbody>
1017      * <tr>
1018      * <td align="center">INACTIVE</td>
1019      * <td align="center">Camera device finished AE scan</td>
1020      * <td align="center">CONVERGED</td>
1021      * <td align="center">Values are already good, transient states are skipped by camera device.</td>
1022      * </tr>
1023      * <tr>
1024      * <td align="center">Any state (excluding LOCKED)</td>
1025      * <td align="center">{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is START, sequence done</td>
1026      * <td align="center">FLASH_REQUIRED</td>
1027      * <td align="center">Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.</td>
1028      * </tr>
1029      * <tr>
1030      * <td align="center">Any state (excluding LOCKED)</td>
1031      * <td align="center">{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is START, sequence done</td>
1032      * <td align="center">CONVERGED</td>
1033      * <td align="center">Converged after a precapture sequence, transient states are skipped by camera device.</td>
1034      * </tr>
1035      * <tr>
1036      * <td align="center">Any state (excluding LOCKED)</td>
1037      * <td align="center">{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is CANCEL, converged</td>
1038      * <td align="center">FLASH_REQUIRED</td>
1039      * <td align="center">Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.</td>
1040      * </tr>
1041      * <tr>
1042      * <td align="center">Any state (excluding LOCKED)</td>
1043      * <td align="center">{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is CANCEL, converged</td>
1044      * <td align="center">CONVERGED</td>
1045      * <td align="center">Converged after a precapture sequence is canceled, transient states are skipped by camera device.</td>
1046      * </tr>
1047      * <tr>
1048      * <td align="center">CONVERGED</td>
1049      * <td align="center">Camera device finished AE scan</td>
1050      * <td align="center">FLASH_REQUIRED</td>
1051      * <td align="center">Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.</td>
1052      * </tr>
1053      * <tr>
1054      * <td align="center">FLASH_REQUIRED</td>
1055      * <td align="center">Camera device finished AE scan</td>
1056      * <td align="center">CONVERGED</td>
1057      * <td align="center">Converged after a new scan, transient states are skipped by camera device.</td>
1058      * </tr>
1059      * </tbody>
1060      * </table>
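     * <p>As an illustrative sketch only (not part of this key's definition), an application
     * might watch this state from its repeating preview request to decide when a still
     * capture can proceed; <code>result</code> is assumed to be delivered to an app-defined
     * <code>CameraCaptureSession.CaptureCallback#onCaptureCompleted</code>:</p>
     * <code><pre>
     * // 'result' is an app-side CaptureResult from the repeating preview (assumed).
     * Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
     * if (aeState == null
     *         || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED
     *         || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED) {
     *     // AE is settled (or not reported); a still capture may be issued now.
     * } else if (aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
     *     // The precapture metering sequence is still running; keep waiting.
     * }
     * </pre></code>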
1061      * <p><b>Possible values:</b>
1062      * <ul>
1063      *   <li>{@link #CONTROL_AE_STATE_INACTIVE INACTIVE}</li>
1064      *   <li>{@link #CONTROL_AE_STATE_SEARCHING SEARCHING}</li>
1065      *   <li>{@link #CONTROL_AE_STATE_CONVERGED CONVERGED}</li>
1066      *   <li>{@link #CONTROL_AE_STATE_LOCKED LOCKED}</li>
1067      *   <li>{@link #CONTROL_AE_STATE_FLASH_REQUIRED FLASH_REQUIRED}</li>
1068      *   <li>{@link #CONTROL_AE_STATE_PRECAPTURE PRECAPTURE}</li>
1069      * </ul></p>
1070      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
1071      * <p><b>Limited capability</b> -
1072      * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
1073      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
1074      *
1075      * @see CaptureRequest#CONTROL_AE_LOCK
1076      * @see CaptureRequest#CONTROL_AE_MODE
1077      * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
1078      * @see CaptureRequest#CONTROL_MODE
1079      * @see CaptureRequest#CONTROL_SCENE_MODE
1080      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
1081      * @see #CONTROL_AE_STATE_INACTIVE
1082      * @see #CONTROL_AE_STATE_SEARCHING
1083      * @see #CONTROL_AE_STATE_CONVERGED
1084      * @see #CONTROL_AE_STATE_LOCKED
1085      * @see #CONTROL_AE_STATE_FLASH_REQUIRED
1086      * @see #CONTROL_AE_STATE_PRECAPTURE
1087      */
1088     @PublicKey
1089     public static final Key<Integer> CONTROL_AE_STATE =
1090             new Key<Integer>("android.control.aeState", int.class);
1091 
1092     /**
1093      * <p>Whether auto-focus (AF) is currently enabled, and what
1094      * mode it is set to.</p>
1095      * <p>Only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} = AUTO and the lens is not fixed focus
1096      * (i.e. <code>{@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance} &gt; 0</code>). Also note that
1097      * when {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is OFF, the behavior of AF is device
1098      * dependent. It is recommended to lock AF by using {@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger} before
1099      * setting {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} to OFF, or set AF mode to OFF when AE is OFF.</p>
1100      * <p>If the lens is controlled by the camera device auto-focus algorithm,
1101      * the camera device will report the current AF status in {@link CaptureResult#CONTROL_AF_STATE android.control.afState}
1102      * in result metadata.</p>
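     * <p>A minimal usage sketch, assuming <code>previewBuilder</code> is an app-defined
     * {@link CaptureRequest.Builder} for the repeating preview request:</p>
     * <code><pre>
     * // 'previewBuilder' is created elsewhere by the application (assumed).
     * previewBuilder.set(CaptureRequest.CONTROL_AF_MODE,
     *         CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
     * </pre></code>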
1103      * <p><b>Possible values:</b>
1104      * <ul>
1105      *   <li>{@link #CONTROL_AF_MODE_OFF OFF}</li>
1106      *   <li>{@link #CONTROL_AF_MODE_AUTO AUTO}</li>
1107      *   <li>{@link #CONTROL_AF_MODE_MACRO MACRO}</li>
1108      *   <li>{@link #CONTROL_AF_MODE_CONTINUOUS_VIDEO CONTINUOUS_VIDEO}</li>
1109      *   <li>{@link #CONTROL_AF_MODE_CONTINUOUS_PICTURE CONTINUOUS_PICTURE}</li>
1110      *   <li>{@link #CONTROL_AF_MODE_EDOF EDOF}</li>
1111      * </ul></p>
1112      * <p><b>Available values for this device:</b><br>
1113      * {@link CameraCharacteristics#CONTROL_AF_AVAILABLE_MODES android.control.afAvailableModes}</p>
1114      * <p>This key is available on all devices.</p>
1115      *
1116      * @see CaptureRequest#CONTROL_AE_MODE
1117      * @see CameraCharacteristics#CONTROL_AF_AVAILABLE_MODES
1118      * @see CaptureResult#CONTROL_AF_STATE
1119      * @see CaptureRequest#CONTROL_AF_TRIGGER
1120      * @see CaptureRequest#CONTROL_MODE
1121      * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
1122      * @see #CONTROL_AF_MODE_OFF
1123      * @see #CONTROL_AF_MODE_AUTO
1124      * @see #CONTROL_AF_MODE_MACRO
1125      * @see #CONTROL_AF_MODE_CONTINUOUS_VIDEO
1126      * @see #CONTROL_AF_MODE_CONTINUOUS_PICTURE
1127      * @see #CONTROL_AF_MODE_EDOF
1128      */
1129     @PublicKey
1130     public static final Key<Integer> CONTROL_AF_MODE =
1131             new Key<Integer>("android.control.afMode", int.class);
1132 
1133     /**
1134      * <p>List of metering areas to use for auto-focus.</p>
1135      * <p>Not available if {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AF android.control.maxRegionsAf} is 0.
1136      * Otherwise will always be present.</p>
1137      * <p>The maximum number of focus areas supported by the device is determined by the value
1138      * of {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AF android.control.maxRegionsAf}.</p>
1139      * <p>The coordinate system is based on the active pixel array,
1140      * with (0,0) being the top-left pixel in the active pixel array, and
1141      * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
1142      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
1143      * bottom-right pixel in the active pixel array.</p>
1144      * <p>The weight must be within <code>[0, 1000]</code>, and represents a weight
1145      * for every pixel in the area. This means that a large metering area
1146      * with the same weight as a smaller area will have more effect in
1147      * the metering result. Metering areas can partially overlap and the
1148      * camera device will add the weights in the overlap region.</p>
1149      * <p>The weights are relative to weights of other metering regions, so if only one region
1150      * is used, all non-zero weights will have the same effect. A region with 0 weight is
1151      * ignored.</p>
1152      * <p>If all regions have 0 weight, then no specific metering area needs to be used by the
1153      * camera device.</p>
1154      * <p>If the metering region is outside the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in
1155      * capture result metadata, the camera device will ignore the sections outside the crop
1156      * region and output only the intersection rectangle as the metering region in the result
1157      * metadata. If the region is entirely outside the crop region, it will be ignored and
1158      * not reported in the result metadata.</p>
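     * <p>For illustration only, a single region covering roughly the middle fifth of the
     * active array could be built as below; <code>characteristics</code> and
     * <code>previewBuilder</code> are assumed to already exist in the application:</p>
     * <code><pre>
     * // 'characteristics' and 'previewBuilder' are app-side objects (assumed).
     * Rect active = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
     * int w = active.width() / 5;
     * int h = active.height() / 5;
     * MeteringRectangle center = new MeteringRectangle(
     *         (active.width() - w) / 2, (active.height() - h) / 2,
     *         w, h, MeteringRectangle.METERING_WEIGHT_MAX);
     * previewBuilder.set(CaptureRequest.CONTROL_AF_REGIONS,
     *         new MeteringRectangle[] { center });
     * </pre></code>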
1159      * <p><b>Units</b>: Pixel coordinates within {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
1160      * <p><b>Range of valid values:</b><br>
1161      * Coordinates must be between <code>[(0,0), (width, height))</code> of
1162      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
1163      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
1164      *
1165      * @see CameraCharacteristics#CONTROL_MAX_REGIONS_AF
1166      * @see CaptureRequest#SCALER_CROP_REGION
1167      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
1168      */
1169     @PublicKey
1170     public static final Key<android.hardware.camera2.params.MeteringRectangle[]> CONTROL_AF_REGIONS =
1171             new Key<android.hardware.camera2.params.MeteringRectangle[]>("android.control.afRegions", android.hardware.camera2.params.MeteringRectangle[].class);
1172 
1173     /**
1174      * <p>Whether the camera device will trigger autofocus for this request.</p>
1175      * <p>This entry is normally set to IDLE, or is not
1176      * included at all in the request settings.</p>
1177      * <p>When included and set to START, the camera device will trigger the
1178      * autofocus algorithm. If autofocus is disabled, this trigger has no effect.</p>
1179      * <p>When set to CANCEL, the camera device will cancel any active trigger,
1180      * and return to its initial AF state.</p>
1181      * <p>Generally, applications should set this entry to START or CANCEL for only a
1182      * single capture, and then return it to IDLE (or not set at all). Specifying
1183      * START for multiple captures in a row means restarting the AF operation over
1184      * and over again.</p>
1185      * <p>See {@link CaptureResult#CONTROL_AF_STATE android.control.afState} for what the trigger means for each AF mode.</p>
1186      * <p>Using the autofocus trigger and the precapture trigger {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}
1187      * simultaneously is allowed. However, since these triggers often require cooperation between
1188      * the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
1189      * focus sweep), the camera device may delay acting on a later trigger until the previous
1190      * trigger has been fully handled. This may lead to longer intervals between the trigger and
1191      * changes to {@link CaptureResult#CONTROL_AF_STATE android.control.afState}, for example.</p>
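     * <p>A hedged sketch of a one-shot trigger, sent as a single capture so that the
     * repeating request keeps its IDLE value; <code>previewBuilder</code>,
     * <code>session</code>, <code>callback</code> and <code>handler</code> are app-side
     * objects assumed to exist:</p>
     * <code><pre>
     * // App-side objects (assumed): previewBuilder, session, callback, handler.
     * previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
     *         CaptureRequest.CONTROL_AF_TRIGGER_START);
     * session.capture(previewBuilder.build(), callback, handler);
     * // Reset the builder to IDLE so requests built from it later do not re-trigger AF.
     * previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
     *         CaptureRequest.CONTROL_AF_TRIGGER_IDLE);
     * </pre></code>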
1192      * <p><b>Possible values:</b>
1193      * <ul>
1194      *   <li>{@link #CONTROL_AF_TRIGGER_IDLE IDLE}</li>
1195      *   <li>{@link #CONTROL_AF_TRIGGER_START START}</li>
1196      *   <li>{@link #CONTROL_AF_TRIGGER_CANCEL CANCEL}</li>
1197      * </ul></p>
1198      * <p>This key is available on all devices.</p>
1199      *
1200      * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
1201      * @see CaptureResult#CONTROL_AF_STATE
1202      * @see #CONTROL_AF_TRIGGER_IDLE
1203      * @see #CONTROL_AF_TRIGGER_START
1204      * @see #CONTROL_AF_TRIGGER_CANCEL
1205      */
1206     @PublicKey
1207     public static final Key<Integer> CONTROL_AF_TRIGGER =
1208             new Key<Integer>("android.control.afTrigger", int.class);
1209 
1210     /**
1211      * <p>Current state of auto-focus (AF) algorithm.</p>
1212      * <p>Switching between or enabling AF modes ({@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}) always
1213      * resets the AF state to INACTIVE. Similarly, switching between values of {@link CaptureRequest#CONTROL_MODE android.control.mode},
1214      * or of {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode} if <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE</code>, resets all
1215      * the algorithm states to INACTIVE.</p>
1216      * <p>The camera device can do several state transitions between two results, if it is
1217      * allowed by the state transition table. For example: INACTIVE may never actually be
1218      * seen in a result.</p>
1219      * <p>The state in the result is the state for this image (in sync with this image): if
1220      * AF state becomes FOCUSED, then the image data associated with this result should
1221      * be sharp.</p>
1222      * <p>Below are state transition tables for different AF modes.</p>
1223      * <p>When {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} is AF_MODE_OFF or AF_MODE_EDOF:</p>
1224      * <table>
1225      * <thead>
1226      * <tr>
1227      * <th align="center">State</th>
1228      * <th align="center">Transition Cause</th>
1229      * <th align="center">New State</th>
1230      * <th align="center">Notes</th>
1231      * </tr>
1232      * </thead>
1233      * <tbody>
1234      * <tr>
1235      * <td align="center">INACTIVE</td>
1236      * <td align="center"></td>
1237      * <td align="center">INACTIVE</td>
1238      * <td align="center">Never changes</td>
1239      * </tr>
1240      * </tbody>
1241      * </table>
1242      * <p>When {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} is AF_MODE_AUTO or AF_MODE_MACRO:</p>
1243      * <table>
1244      * <thead>
1245      * <tr>
1246      * <th align="center">State</th>
1247      * <th align="center">Transition Cause</th>
1248      * <th align="center">New State</th>
1249      * <th align="center">Notes</th>
1250      * </tr>
1251      * </thead>
1252      * <tbody>
1253      * <tr>
1254      * <td align="center">INACTIVE</td>
1255      * <td align="center">AF_TRIGGER</td>
1256      * <td align="center">ACTIVE_SCAN</td>
1257      * <td align="center">Start AF sweep, Lens now moving</td>
1258      * </tr>
1259      * <tr>
1260      * <td align="center">ACTIVE_SCAN</td>
1261      * <td align="center">AF sweep done</td>
1262      * <td align="center">FOCUSED_LOCKED</td>
1263      * <td align="center">Focused, Lens now locked</td>
1264      * </tr>
1265      * <tr>
1266      * <td align="center">ACTIVE_SCAN</td>
1267      * <td align="center">AF sweep done</td>
1268      * <td align="center">NOT_FOCUSED_LOCKED</td>
1269      * <td align="center">Not focused, Lens now locked</td>
1270      * </tr>
1271      * <tr>
1272      * <td align="center">ACTIVE_SCAN</td>
1273      * <td align="center">AF_CANCEL</td>
1274      * <td align="center">INACTIVE</td>
1275      * <td align="center">Cancel/reset AF, Lens now locked</td>
1276      * </tr>
1277      * <tr>
1278      * <td align="center">FOCUSED_LOCKED</td>
1279      * <td align="center">AF_CANCEL</td>
1280      * <td align="center">INACTIVE</td>
1281      * <td align="center">Cancel/reset AF</td>
1282      * </tr>
1283      * <tr>
1284      * <td align="center">FOCUSED_LOCKED</td>
1285      * <td align="center">AF_TRIGGER</td>
1286      * <td align="center">ACTIVE_SCAN</td>
1287      * <td align="center">Start new sweep, Lens now moving</td>
1288      * </tr>
1289      * <tr>
1290      * <td align="center">NOT_FOCUSED_LOCKED</td>
1291      * <td align="center">AF_CANCEL</td>
1292      * <td align="center">INACTIVE</td>
1293      * <td align="center">Cancel/reset AF</td>
1294      * </tr>
1295      * <tr>
1296      * <td align="center">NOT_FOCUSED_LOCKED</td>
1297      * <td align="center">AF_TRIGGER</td>
1298      * <td align="center">ACTIVE_SCAN</td>
1299      * <td align="center">Start new sweep, Lens now moving</td>
1300      * </tr>
1301      * <tr>
1302      * <td align="center">Any state</td>
1303      * <td align="center">Mode change</td>
1304      * <td align="center">INACTIVE</td>
1305      * <td align="center"></td>
1306      * </tr>
1307      * </tbody>
1308      * </table>
1309      * <p>For the above table, the camera device may skip reporting any state changes that happen
1310      * without application intervention (i.e. mode switch, trigger, locking). Any state that
1311      * can be skipped in that manner is called a transient state.</p>
1312      * <p>For example, for these AF modes (AF_MODE_AUTO and AF_MODE_MACRO), in addition to the
1313      * state transitions listed in the above table, it is also legal for the camera device to skip
1314      * one or more transient states between two results. See the table below for examples:</p>
1315      * <table>
1316      * <thead>
1317      * <tr>
1318      * <th align="center">State</th>
1319      * <th align="center">Transition Cause</th>
1320      * <th align="center">New State</th>
1321      * <th align="center">Notes</th>
1322      * </tr>
1323      * </thead>
1324      * <tbody>
1325      * <tr>
1326      * <td align="center">INACTIVE</td>
1327      * <td align="center">AF_TRIGGER</td>
1328      * <td align="center">FOCUSED_LOCKED</td>
1329      * <td align="center">Focus is already good or good after a scan, lens is now locked.</td>
1330      * </tr>
1331      * <tr>
1332      * <td align="center">INACTIVE</td>
1333      * <td align="center">AF_TRIGGER</td>
1334      * <td align="center">NOT_FOCUSED_LOCKED</td>
1335      * <td align="center">Focus failed after a scan, lens is now locked.</td>
1336      * </tr>
1337      * <tr>
1338      * <td align="center">FOCUSED_LOCKED</td>
1339      * <td align="center">AF_TRIGGER</td>
1340      * <td align="center">FOCUSED_LOCKED</td>
1341      * <td align="center">Focus is already good or good after a scan, lens is now locked.</td>
1342      * </tr>
1343      * <tr>
1344      * <td align="center">NOT_FOCUSED_LOCKED</td>
1345      * <td align="center">AF_TRIGGER</td>
1346      * <td align="center">FOCUSED_LOCKED</td>
1347      * <td align="center">Focus is good after a scan, lens is now locked.</td>
1348      * </tr>
1349      * </tbody>
1350      * </table>
1351      * <p>When {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} is AF_MODE_CONTINUOUS_VIDEO:</p>
1352      * <table>
1353      * <thead>
1354      * <tr>
1355      * <th align="center">State</th>
1356      * <th align="center">Transition Cause</th>
1357      * <th align="center">New State</th>
1358      * <th align="center">Notes</th>
1359      * </tr>
1360      * </thead>
1361      * <tbody>
1362      * <tr>
1363      * <td align="center">INACTIVE</td>
1364      * <td align="center">Camera device initiates new scan</td>
1365      * <td align="center">PASSIVE_SCAN</td>
1366      * <td align="center">Start AF scan, Lens now moving</td>
1367      * </tr>
1368      * <tr>
1369      * <td align="center">INACTIVE</td>
1370      * <td align="center">AF_TRIGGER</td>
1371      * <td align="center">NOT_FOCUSED_LOCKED</td>
1372      * <td align="center">AF state query, Lens now locked</td>
1373      * </tr>
1374      * <tr>
1375      * <td align="center">PASSIVE_SCAN</td>
1376      * <td align="center">Camera device completes current scan</td>
1377      * <td align="center">PASSIVE_FOCUSED</td>
1378      * <td align="center">End AF scan, Lens now locked</td>
1379      * </tr>
1380      * <tr>
1381      * <td align="center">PASSIVE_SCAN</td>
1382      * <td align="center">Camera device fails current scan</td>
1383      * <td align="center">PASSIVE_UNFOCUSED</td>
1384      * <td align="center">End AF scan, Lens now locked</td>
1385      * </tr>
1386      * <tr>
1387      * <td align="center">PASSIVE_SCAN</td>
1388      * <td align="center">AF_TRIGGER</td>
1389      * <td align="center">FOCUSED_LOCKED</td>
1390      * <td align="center">Immediate transition, if focus is good. Lens now locked</td>
1391      * </tr>
1392      * <tr>
1393      * <td align="center">PASSIVE_SCAN</td>
1394      * <td align="center">AF_TRIGGER</td>
1395      * <td align="center">NOT_FOCUSED_LOCKED</td>
1396      * <td align="center">Immediate transition, if focus is bad. Lens now locked</td>
1397      * </tr>
1398      * <tr>
1399      * <td align="center">PASSIVE_SCAN</td>
1400      * <td align="center">AF_CANCEL</td>
1401      * <td align="center">INACTIVE</td>
1402      * <td align="center">Reset lens position, Lens now locked</td>
1403      * </tr>
1404      * <tr>
1405      * <td align="center">PASSIVE_FOCUSED</td>
1406      * <td align="center">Camera device initiates new scan</td>
1407      * <td align="center">PASSIVE_SCAN</td>
1408      * <td align="center">Start AF scan, Lens now moving</td>
1409      * </tr>
1410      * <tr>
1411      * <td align="center">PASSIVE_UNFOCUSED</td>
1412      * <td align="center">Camera device initiates new scan</td>
1413      * <td align="center">PASSIVE_SCAN</td>
1414      * <td align="center">Start AF scan, Lens now moving</td>
1415      * </tr>
1416      * <tr>
1417      * <td align="center">PASSIVE_FOCUSED</td>
1418      * <td align="center">AF_TRIGGER</td>
1419      * <td align="center">FOCUSED_LOCKED</td>
1420      * <td align="center">Immediate transition, lens now locked</td>
1421      * </tr>
1422      * <tr>
1423      * <td align="center">PASSIVE_UNFOCUSED</td>
1424      * <td align="center">AF_TRIGGER</td>
1425      * <td align="center">NOT_FOCUSED_LOCKED</td>
1426      * <td align="center">Immediate transition, lens now locked</td>
1427      * </tr>
1428      * <tr>
1429      * <td align="center">FOCUSED_LOCKED</td>
1430      * <td align="center">AF_TRIGGER</td>
1431      * <td align="center">FOCUSED_LOCKED</td>
1432      * <td align="center">No effect</td>
1433      * </tr>
1434      * <tr>
1435      * <td align="center">FOCUSED_LOCKED</td>
1436      * <td align="center">AF_CANCEL</td>
1437      * <td align="center">INACTIVE</td>
1438      * <td align="center">Restart AF scan</td>
1439      * </tr>
1440      * <tr>
1441      * <td align="center">NOT_FOCUSED_LOCKED</td>
1442      * <td align="center">AF_TRIGGER</td>
1443      * <td align="center">NOT_FOCUSED_LOCKED</td>
1444      * <td align="center">No effect</td>
1445      * </tr>
1446      * <tr>
1447      * <td align="center">NOT_FOCUSED_LOCKED</td>
1448      * <td align="center">AF_CANCEL</td>
1449      * <td align="center">INACTIVE</td>
1450      * <td align="center">Restart AF scan</td>
1451      * </tr>
1452      * </tbody>
1453      * </table>
1454      * <p>When {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} is AF_MODE_CONTINUOUS_PICTURE:</p>
1455      * <table>
1456      * <thead>
1457      * <tr>
1458      * <th align="center">State</th>
1459      * <th align="center">Transition Cause</th>
1460      * <th align="center">New State</th>
1461      * <th align="center">Notes</th>
1462      * </tr>
1463      * </thead>
1464      * <tbody>
1465      * <tr>
1466      * <td align="center">INACTIVE</td>
1467      * <td align="center">Camera device initiates new scan</td>
1468      * <td align="center">PASSIVE_SCAN</td>
1469      * <td align="center">Start AF scan, Lens now moving</td>
1470      * </tr>
1471      * <tr>
1472      * <td align="center">INACTIVE</td>
1473      * <td align="center">AF_TRIGGER</td>
1474      * <td align="center">NOT_FOCUSED_LOCKED</td>
1475      * <td align="center">AF state query, Lens now locked</td>
1476      * </tr>
1477      * <tr>
1478      * <td align="center">PASSIVE_SCAN</td>
1479      * <td align="center">Camera device completes current scan</td>
1480      * <td align="center">PASSIVE_FOCUSED</td>
1481      * <td align="center">End AF scan, Lens now locked</td>
1482      * </tr>
1483      * <tr>
1484      * <td align="center">PASSIVE_SCAN</td>
1485      * <td align="center">Camera device fails current scan</td>
1486      * <td align="center">PASSIVE_UNFOCUSED</td>
1487      * <td align="center">End AF scan, Lens now locked</td>
1488      * </tr>
1489      * <tr>
1490      * <td align="center">PASSIVE_SCAN</td>
1491      * <td align="center">AF_TRIGGER</td>
1492      * <td align="center">FOCUSED_LOCKED</td>
1493      * <td align="center">Eventual transition once the focus is good. Lens now locked</td>
1494      * </tr>
1495      * <tr>
1496      * <td align="center">PASSIVE_SCAN</td>
1497      * <td align="center">AF_TRIGGER</td>
1498      * <td align="center">NOT_FOCUSED_LOCKED</td>
1499      * <td align="center">Eventual transition if focus cannot be found. Lens now locked</td>
1500      * </tr>
1501      * <tr>
1502      * <td align="center">PASSIVE_SCAN</td>
1503      * <td align="center">AF_CANCEL</td>
1504      * <td align="center">INACTIVE</td>
1505      * <td align="center">Reset lens position, Lens now locked</td>
1506      * </tr>
1507      * <tr>
1508      * <td align="center">PASSIVE_FOCUSED</td>
1509      * <td align="center">Camera device initiates new scan</td>
1510      * <td align="center">PASSIVE_SCAN</td>
1511      * <td align="center">Start AF scan, Lens now moving</td>
1512      * </tr>
1513      * <tr>
1514      * <td align="center">PASSIVE_UNFOCUSED</td>
1515      * <td align="center">Camera device initiates new scan</td>
1516      * <td align="center">PASSIVE_SCAN</td>
1517      * <td align="center">Start AF scan, Lens now moving</td>
1518      * </tr>
1519      * <tr>
1520      * <td align="center">PASSIVE_FOCUSED</td>
1521      * <td align="center">AF_TRIGGER</td>
1522      * <td align="center">FOCUSED_LOCKED</td>
1523      * <td align="center">Immediate transition. Lens now locked</td>
1524      * </tr>
1525      * <tr>
1526      * <td align="center">PASSIVE_UNFOCUSED</td>
1527      * <td align="center">AF_TRIGGER</td>
1528      * <td align="center">NOT_FOCUSED_LOCKED</td>
1529      * <td align="center">Immediate transition. Lens now locked</td>
1530      * </tr>
1531      * <tr>
1532      * <td align="center">FOCUSED_LOCKED</td>
1533      * <td align="center">AF_TRIGGER</td>
1534      * <td align="center">FOCUSED_LOCKED</td>
1535      * <td align="center">No effect</td>
1536      * </tr>
1537      * <tr>
1538      * <td align="center">FOCUSED_LOCKED</td>
1539      * <td align="center">AF_CANCEL</td>
1540      * <td align="center">INACTIVE</td>
1541      * <td align="center">Restart AF scan</td>
1542      * </tr>
1543      * <tr>
1544      * <td align="center">NOT_FOCUSED_LOCKED</td>
1545      * <td align="center">AF_TRIGGER</td>
1546      * <td align="center">NOT_FOCUSED_LOCKED</td>
1547      * <td align="center">No effect</td>
1548      * </tr>
1549      * <tr>
1550      * <td align="center">NOT_FOCUSED_LOCKED</td>
1551      * <td align="center">AF_CANCEL</td>
1552      * <td align="center">INACTIVE</td>
1553      * <td align="center">Restart AF scan</td>
1554      * </tr>
1555      * </tbody>
1556      * </table>
1557      * <p>When switching between AF_MODE_CONTINUOUS_* (CAF modes) and AF_MODE_AUTO/AF_MODE_MACRO
1558      * (AUTO modes), the initial INACTIVE or PASSIVE_SCAN states may be skipped by the
1559      * camera device. When a trigger is included in a mode switch request, the trigger
1560      * will be evaluated in the context of the new mode in the request.
1561      * See the table below for examples:</p>
1562      * <table>
1563      * <thead>
1564      * <tr>
1565      * <th align="center">State</th>
1566      * <th align="center">Transition Cause</th>
1567      * <th align="center">New State</th>
1568      * <th align="center">Notes</th>
1569      * </tr>
1570      * </thead>
1571      * <tbody>
1572      * <tr>
1573      * <td align="center">any state</td>
1574      * <td align="center">CAF--&gt;AUTO mode switch</td>
1575      * <td align="center">INACTIVE</td>
1576      * <td align="center">Mode switch without trigger, initial state must be INACTIVE</td>
1577      * </tr>
1578      * <tr>
1579      * <td align="center">any state</td>
1580      * <td align="center">CAF--&gt;AUTO mode switch with AF_TRIGGER</td>
1581      * <td align="center">trigger-reachable states from INACTIVE</td>
1582      * <td align="center">Mode switch with trigger, INACTIVE is skipped</td>
1583      * </tr>
1584      * <tr>
1585      * <td align="center">any state</td>
1586      * <td align="center">AUTO--&gt;CAF mode switch</td>
1587      * <td align="center">passively reachable states from INACTIVE</td>
1588      * <td align="center">Mode switch without trigger, passive transient state is skipped</td>
1589      * </tr>
1590      * </tbody>
1591      * </table>
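     * <p>A non-normative example: in the AUTO modes, an application that has sent an AF
     * trigger might wait for either locked state before capturing; <code>result</code> is
     * assumed to come from the application's capture callback:</p>
     * <code><pre>
     * // 'result' is an app-side CaptureResult (assumed).
     * Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
     * if (afState != null) {
     *     if (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED
     *             || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
     *         // The triggered AF sweep has finished; the still capture can proceed.
     *     }
     * }
     * </pre></code>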
1592      * <p><b>Possible values:</b>
1593      * <ul>
1594      *   <li>{@link #CONTROL_AF_STATE_INACTIVE INACTIVE}</li>
1595      *   <li>{@link #CONTROL_AF_STATE_PASSIVE_SCAN PASSIVE_SCAN}</li>
1596      *   <li>{@link #CONTROL_AF_STATE_PASSIVE_FOCUSED PASSIVE_FOCUSED}</li>
1597      *   <li>{@link #CONTROL_AF_STATE_ACTIVE_SCAN ACTIVE_SCAN}</li>
1598      *   <li>{@link #CONTROL_AF_STATE_FOCUSED_LOCKED FOCUSED_LOCKED}</li>
1599      *   <li>{@link #CONTROL_AF_STATE_NOT_FOCUSED_LOCKED NOT_FOCUSED_LOCKED}</li>
1600      *   <li>{@link #CONTROL_AF_STATE_PASSIVE_UNFOCUSED PASSIVE_UNFOCUSED}</li>
1601      * </ul></p>
1602      * <p>This key is available on all devices.</p>
1603      *
1604      * @see CaptureRequest#CONTROL_AF_MODE
1605      * @see CaptureRequest#CONTROL_MODE
1606      * @see CaptureRequest#CONTROL_SCENE_MODE
1607      * @see #CONTROL_AF_STATE_INACTIVE
1608      * @see #CONTROL_AF_STATE_PASSIVE_SCAN
1609      * @see #CONTROL_AF_STATE_PASSIVE_FOCUSED
1610      * @see #CONTROL_AF_STATE_ACTIVE_SCAN
1611      * @see #CONTROL_AF_STATE_FOCUSED_LOCKED
1612      * @see #CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
1613      * @see #CONTROL_AF_STATE_PASSIVE_UNFOCUSED
1614      */
1615     @PublicKey
1616     public static final Key<Integer> CONTROL_AF_STATE =
1617             new Key<Integer>("android.control.afState", int.class);
1618 
1619     /**
1620      * <p>Whether auto-white balance (AWB) is currently locked to its
1621      * latest calculated values.</p>
1622      * <p>When set to <code>true</code> (ON), the AWB algorithm is locked to its latest parameters,
1623      * and will not change color balance settings until the lock is set to <code>false</code> (OFF).</p>
1624      * <p>Since the camera device has a pipeline of in-flight requests, the settings that
1625      * get locked do not necessarily correspond to the settings that were present in the
1626      * latest capture result received from the camera device, since additional captures
1627      * and AWB updates may have occurred even before the result was sent out. If an
1628      * application is switching between automatic and manual control and wishes to eliminate
1629      * any flicker during the switch, the following procedure is recommended:</p>
1630      * <ol>
1631      * <li>Starting in auto-AWB mode:</li>
1632      * <li>Lock AWB</li>
1633      * <li>Wait for the first result to be output that has the AWB locked</li>
1634      * <li>Copy AWB settings from that result into a request, set the request to manual AWB</li>
1635      * <li>Submit the capture request, proceed to run manual AWB as desired.</li>
1636      * </ol>
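     * <p>Step 4 above might look roughly like the following sketch (illustrative only);
     * <code>lockedResult</code> is assumed to be the first result reporting the lock and
     * <code>manualBuilder</code> an app-defined request builder:</p>
     * <code><pre>
     * // 'lockedResult' and 'manualBuilder' are app-side objects (assumed).
     * RggbChannelVector gains = lockedResult.get(CaptureResult.COLOR_CORRECTION_GAINS);
     * ColorSpaceTransform ccm = lockedResult.get(CaptureResult.COLOR_CORRECTION_TRANSFORM);
     * manualBuilder.set(CaptureRequest.CONTROL_AWB_MODE,
     *         CaptureRequest.CONTROL_AWB_MODE_OFF);
     * manualBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE,
     *         CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
     * manualBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS, gains);
     * manualBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, ccm);
     * </pre></code>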
1637      * <p>Note that AWB lock is only meaningful when
1638      * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} is in the AUTO mode; in other modes,
1639      * AWB is already fixed to a specific setting.</p>
1640      * <p>Some LEGACY devices may not support ON; the value is then overridden to OFF.</p>
1641      * <p>This key is available on all devices.</p>
1642      *
1643      * @see CaptureRequest#CONTROL_AWB_MODE
1644      */
1645     @PublicKey
1646     public static final Key<Boolean> CONTROL_AWB_LOCK =
1647             new Key<Boolean>("android.control.awbLock", boolean.class);
1648 
1649     /**
1650      * <p>Whether auto-white balance (AWB) is currently setting the color
1651      * transform fields, and what its illumination target
1652      * is.</p>
1653      * <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is AUTO.</p>
1654      * <p>When set to the ON mode, the camera device's auto-white balance
1655      * routine is enabled, overriding the application's selected
1656      * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
1657      * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}. Note that when {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}
1658      * is OFF, the behavior of AWB is device dependent. It is recommended to
1659      * also set AWB mode to OFF or lock AWB by using {@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} before
1660      * setting AE mode to OFF.</p>
1661      * <p>When set to the OFF mode, the camera device's auto-white balance
1662      * routine is disabled. The application manually controls the white
1663      * balance by {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains}
1664      * and {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}.</p>
1665      * <p>When set to any other modes, the camera device's auto-white
1666      * balance routine is disabled. The camera device uses each
1667      * particular illumination target for white balance
1668      * adjustment. The application's values for
1669      * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform},
1670      * {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
1671      * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} are ignored.</p>
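     * <p>A small sketch (assuming app-side <code>characteristics</code> and
     * <code>previewBuilder</code>) that falls back to AUTO when a preset illuminant is not
     * advertised by the device:</p>
     * <code><pre>
     * // 'characteristics' and 'previewBuilder' are app-side objects (assumed).
     * int[] awbModes = characteristics.get(CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES);
     * int desired = CaptureRequest.CONTROL_AWB_MODE_DAYLIGHT;
     * boolean supported = false;
     * for (int mode : awbModes) {
     *     if (mode == desired) {
     *         supported = true;
     *     }
     * }
     * previewBuilder.set(CaptureRequest.CONTROL_AWB_MODE,
     *         supported ? desired : CaptureRequest.CONTROL_AWB_MODE_AUTO);
     * </pre></code>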
1672      * <p><b>Possible values:</b>
1673      * <ul>
1674      *   <li>{@link #CONTROL_AWB_MODE_OFF OFF}</li>
1675      *   <li>{@link #CONTROL_AWB_MODE_AUTO AUTO}</li>
1676      *   <li>{@link #CONTROL_AWB_MODE_INCANDESCENT INCANDESCENT}</li>
1677      *   <li>{@link #CONTROL_AWB_MODE_FLUORESCENT FLUORESCENT}</li>
1678      *   <li>{@link #CONTROL_AWB_MODE_WARM_FLUORESCENT WARM_FLUORESCENT}</li>
1679      *   <li>{@link #CONTROL_AWB_MODE_DAYLIGHT DAYLIGHT}</li>
1680      *   <li>{@link #CONTROL_AWB_MODE_CLOUDY_DAYLIGHT CLOUDY_DAYLIGHT}</li>
1681      *   <li>{@link #CONTROL_AWB_MODE_TWILIGHT TWILIGHT}</li>
1682      *   <li>{@link #CONTROL_AWB_MODE_SHADE SHADE}</li>
1683      * </ul></p>
1684      * <p><b>Available values for this device:</b><br>
1685      * {@link CameraCharacteristics#CONTROL_AWB_AVAILABLE_MODES android.control.awbAvailableModes}</p>
1686      * <p>This key is available on all devices.</p>
1687      *
1688      * @see CaptureRequest#COLOR_CORRECTION_GAINS
1689      * @see CaptureRequest#COLOR_CORRECTION_MODE
1690      * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
1691      * @see CaptureRequest#CONTROL_AE_MODE
1692      * @see CameraCharacteristics#CONTROL_AWB_AVAILABLE_MODES
1693      * @see CaptureRequest#CONTROL_AWB_LOCK
1694      * @see CaptureRequest#CONTROL_MODE
1695      * @see #CONTROL_AWB_MODE_OFF
1696      * @see #CONTROL_AWB_MODE_AUTO
1697      * @see #CONTROL_AWB_MODE_INCANDESCENT
1698      * @see #CONTROL_AWB_MODE_FLUORESCENT
1699      * @see #CONTROL_AWB_MODE_WARM_FLUORESCENT
1700      * @see #CONTROL_AWB_MODE_DAYLIGHT
1701      * @see #CONTROL_AWB_MODE_CLOUDY_DAYLIGHT
1702      * @see #CONTROL_AWB_MODE_TWILIGHT
1703      * @see #CONTROL_AWB_MODE_SHADE
1704      */
1705     @PublicKey
1706     public static final Key<Integer> CONTROL_AWB_MODE =
1707             new Key<Integer>("android.control.awbMode", int.class);
1708 
1709     /**
1710      * <p>List of metering areas to use for auto-white-balance illuminant
1711      * estimation.</p>
1712      * <p>Not available if {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AWB android.control.maxRegionsAwb} is 0.
1713      * Otherwise will always be present.</p>
1714      * <p>The maximum number of regions supported by the device is determined by the value
1715      * of {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AWB android.control.maxRegionsAwb}.</p>
1716      * <p>The coordinate system is based on the active pixel array,
1717      * with (0,0) being the top-left pixel in the active pixel array, and
1718      * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
1719      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
1720      * bottom-right pixel in the active pixel array.</p>
1721      * <p>The weight must range from 0 to 1000, and represents a weight
1722      * for every pixel in the area. This means that a large metering area
1723      * with the same weight as a smaller area will have more effect in
1724      * the metering result. Metering areas can partially overlap and the
1725      * camera device will add the weights in the overlap region.</p>
1726      * <p>The weights are relative to weights of other white balance metering regions, so if
1727      * only one region is used, all non-zero weights will have the same effect. A region with
1728      * 0 weight is ignored.</p>
1729      * <p>If all regions have 0 weight, then no specific metering area needs to be used by the
1730      * camera device.</p>
1731      * <p>If the metering region is outside the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in
1732      * capture result metadata, the camera device will ignore the sections outside the crop
1733      * region and output only the intersection rectangle as the metering region in the result
1734      * metadata.  If the region is entirely outside the crop region, it will be ignored and
1735      * not reported in the result metadata.</p>
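     * <p>Because this key is absent when no AWB regions are supported, a guard such as the
     * following sketch may be useful; <code>characteristics</code>, <code>previewBuilder</code>
     * and a prepared <code>regions</code> array are app-side assumptions:</p>
     * <code><pre>
     * // 'characteristics', 'previewBuilder' and 'regions' are app-side objects (assumed).
     * int maxAwbRegions = characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
     * if (maxAwbRegions &gt; 0) {
     *     previewBuilder.set(CaptureRequest.CONTROL_AWB_REGIONS, regions);
     * }
     * </pre></code>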
1736      * <p><b>Units</b>: Pixel coordinates within {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
1737      * <p><b>Range of valid values:</b><br>
1738      * Coordinates must be between <code>[(0,0), (width, height))</code> of
1739      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
1740      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
1741      *
1742      * @see CameraCharacteristics#CONTROL_MAX_REGIONS_AWB
1743      * @see CaptureRequest#SCALER_CROP_REGION
1744      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
1745      */
1746     @PublicKey
1747     public static final Key<android.hardware.camera2.params.MeteringRectangle[]> CONTROL_AWB_REGIONS =
1748             new Key<android.hardware.camera2.params.MeteringRectangle[]>("android.control.awbRegions", android.hardware.camera2.params.MeteringRectangle[].class);
1749 
1750     /**
1751      * <p>Information to the camera device 3A (auto-exposure,
1752      * auto-focus, auto-white balance) routines about the purpose
1753      * of this capture, to help the camera device to decide optimal 3A
1754      * strategy.</p>
1755      * <p>This control (except for MANUAL) is only effective if
1756      * <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} != OFF</code> and any 3A routine is active.</p>
1757      * <p>ZERO_SHUTTER_LAG will be supported if {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}
1758      * contains PRIVATE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if
1759      * {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains MANUAL_SENSOR. Other intent values are
1760      * always supported.</p>
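     * <p>Requests created from a {@link CameraDevice} template typically carry a matching
     * intent already, but it can also be set explicitly. A minimal sketch, assuming
     * <code>recordBuilder</code> was created by the application from
     * {@link CameraDevice#TEMPLATE_RECORD}:</p>
     * <code><pre>
     * // 'recordBuilder' is an app-side CaptureRequest.Builder (assumed).
     * recordBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
     *         CaptureRequest.CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT);
     * </pre></code>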
1761      * <p><b>Possible values:</b>
1762      * <ul>
1763      *   <li>{@link #CONTROL_CAPTURE_INTENT_CUSTOM CUSTOM}</li>
1764      *   <li>{@link #CONTROL_CAPTURE_INTENT_PREVIEW PREVIEW}</li>
1765      *   <li>{@link #CONTROL_CAPTURE_INTENT_STILL_CAPTURE STILL_CAPTURE}</li>
1766      *   <li>{@link #CONTROL_CAPTURE_INTENT_VIDEO_RECORD VIDEO_RECORD}</li>
1767      *   <li>{@link #CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT VIDEO_SNAPSHOT}</li>
1768      *   <li>{@link #CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG ZERO_SHUTTER_LAG}</li>
1769      *   <li>{@link #CONTROL_CAPTURE_INTENT_MANUAL MANUAL}</li>
1770      * </ul></p>
1771      * <p>This key is available on all devices.</p>
1772      *
1773      * @see CaptureRequest#CONTROL_MODE
1774      * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
1775      * @see #CONTROL_CAPTURE_INTENT_CUSTOM
1776      * @see #CONTROL_CAPTURE_INTENT_PREVIEW
1777      * @see #CONTROL_CAPTURE_INTENT_STILL_CAPTURE
1778      * @see #CONTROL_CAPTURE_INTENT_VIDEO_RECORD
1779      * @see #CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT
1780      * @see #CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG
1781      * @see #CONTROL_CAPTURE_INTENT_MANUAL
1782      */
1783     @PublicKey
1784     public static final Key<Integer> CONTROL_CAPTURE_INTENT =
1785             new Key<Integer>("android.control.captureIntent", int.class);
1786 
1787     /**
1788      * <p>Current state of auto-white balance (AWB) algorithm.</p>
1789      * <p>Switching between or enabling AWB modes ({@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}) always
1790      * resets the AWB state to INACTIVE. Similarly, switching between values of {@link CaptureRequest#CONTROL_MODE android.control.mode},
1791      * or of {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode} if <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE</code>, resets all
1792      * the algorithm states to INACTIVE.</p>
1793      * <p>The camera device can do several state transitions between two results, if it is
1794      * allowed by the state transition table. So INACTIVE may never actually be seen in
1795      * a result.</p>
1796      * <p>The state in the result is the state for this image (in sync with this image): if
1797      * AWB state becomes CONVERGED, then the image data associated with this result should
1798      * be good to use.</p>
1799      * <p>Below are state transition tables for different AWB modes.</p>
1800      * <p>When <code>{@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} != AWB_MODE_AUTO</code>:</p>
1801      * <table>
1802      * <thead>
1803      * <tr>
1804      * <th align="center">State</th>
1805      * <th align="center">Transition Cause</th>
1806      * <th align="center">New State</th>
1807      * <th align="center">Notes</th>
1808      * </tr>
1809      * </thead>
1810      * <tbody>
1811      * <tr>
1812      * <td align="center">INACTIVE</td>
1813      * <td align="center"></td>
1814      * <td align="center">INACTIVE</td>
1815      * <td align="center">Camera device auto white balance algorithm is disabled</td>
1816      * </tr>
1817      * </tbody>
1818      * </table>
1819      * <p>When {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} is AWB_MODE_AUTO:</p>
1820      * <table>
1821      * <thead>
1822      * <tr>
1823      * <th align="center">State</th>
1824      * <th align="center">Transition Cause</th>
1825      * <th align="center">New State</th>
1826      * <th align="center">Notes</th>
1827      * </tr>
1828      * </thead>
1829      * <tbody>
1830      * <tr>
1831      * <td align="center">INACTIVE</td>
1832      * <td align="center">Camera device initiates AWB scan</td>
1833      * <td align="center">SEARCHING</td>
1834      * <td align="center">Values changing</td>
1835      * </tr>
1836      * <tr>
1837      * <td align="center">INACTIVE</td>
1838      * <td align="center">{@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is ON</td>
1839      * <td align="center">LOCKED</td>
1840      * <td align="center">Values locked</td>
1841      * </tr>
1842      * <tr>
1843      * <td align="center">SEARCHING</td>
1844      * <td align="center">Camera device finishes AWB scan</td>
1845      * <td align="center">CONVERGED</td>
1846      * <td align="center">Good values, not changing</td>
1847      * </tr>
1848      * <tr>
1849      * <td align="center">SEARCHING</td>
1850      * <td align="center">{@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is ON</td>
1851      * <td align="center">LOCKED</td>
1852      * <td align="center">Values locked</td>
1853      * </tr>
1854      * <tr>
1855      * <td align="center">CONVERGED</td>
1856      * <td align="center">Camera device initiates AWB scan</td>
1857      * <td align="center">SEARCHING</td>
1858      * <td align="center">Values changing</td>
1859      * </tr>
1860      * <tr>
1861      * <td align="center">CONVERGED</td>
1862      * <td align="center">{@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is ON</td>
1863      * <td align="center">LOCKED</td>
1864      * <td align="center">Values locked</td>
1865      * </tr>
1866      * <tr>
1867      * <td align="center">LOCKED</td>
1868      * <td align="center">{@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is OFF</td>
1869      * <td align="center">SEARCHING</td>
1870      * <td align="center">Values not good after unlock</td>
1871      * </tr>
1872      * </tbody>
1873      * </table>
1874      * <p>For the above table, the camera device may skip reporting any state changes that happen
1875      * without application intervention (i.e. mode switch, trigger, locking). Any state that
1876      * can be skipped in that manner is called a transient state.</p>
1877      * <p>For example, for this AWB mode (AWB_MODE_AUTO), in addition to the state transitions
1878      * listed in the above table, it is also legal for the camera device to skip one or more
1879      * transient states between two results. See the table below for examples:</p>
1880      * <table>
1881      * <thead>
1882      * <tr>
1883      * <th align="center">State</th>
1884      * <th align="center">Transition Cause</th>
1885      * <th align="center">New State</th>
1886      * <th align="center">Notes</th>
1887      * </tr>
1888      * </thead>
1889      * <tbody>
1890      * <tr>
1891      * <td align="center">INACTIVE</td>
1892      * <td align="center">Camera device finished AWB scan</td>
1893      * <td align="center">CONVERGED</td>
1894      * <td align="center">Values are already good, transient states are skipped by camera device.</td>
1895      * </tr>
1896      * <tr>
1897      * <td align="center">LOCKED</td>
1898      * <td align="center">{@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is OFF</td>
1899      * <td align="center">CONVERGED</td>
1900      * <td align="center">Values good after unlock, transient states are skipped by camera device.</td>
1901      * </tr>
1902      * </tbody>
1903      * </table>
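     * <p>Illustrative only: an application that wants stable color before locking might wait
     * for CONVERGED in results from its repeating request; <code>result</code> and
     * <code>previewBuilder</code> are app-side assumptions:</p>
     * <code><pre>
     * // 'result' and 'previewBuilder' are app-side objects (assumed).
     * Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
     * if (awbState != null) {
     *     if (awbState == CaptureResult.CONTROL_AWB_STATE_CONVERGED) {
     *         previewBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
     *     }
     * }
     * </pre></code>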
1904      * <p><b>Possible values:</b>
1905      * <ul>
1906      *   <li>{@link #CONTROL_AWB_STATE_INACTIVE INACTIVE}</li>
1907      *   <li>{@link #CONTROL_AWB_STATE_SEARCHING SEARCHING}</li>
1908      *   <li>{@link #CONTROL_AWB_STATE_CONVERGED CONVERGED}</li>
1909      *   <li>{@link #CONTROL_AWB_STATE_LOCKED LOCKED}</li>
1910      * </ul></p>
1911      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
1912      * <p><b>Limited capability</b> -
1913      * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
1914      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
1915      *
1916      * @see CaptureRequest#CONTROL_AWB_LOCK
1917      * @see CaptureRequest#CONTROL_AWB_MODE
1918      * @see CaptureRequest#CONTROL_MODE
1919      * @see CaptureRequest#CONTROL_SCENE_MODE
1920      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
1921      * @see #CONTROL_AWB_STATE_INACTIVE
1922      * @see #CONTROL_AWB_STATE_SEARCHING
1923      * @see #CONTROL_AWB_STATE_CONVERGED
1924      * @see #CONTROL_AWB_STATE_LOCKED
1925      */
1926     @PublicKey
1927     public static final Key<Integer> CONTROL_AWB_STATE =
1928             new Key<Integer>("android.control.awbState", int.class);
1929 
1930     /**
1931      * <p>A special color effect to apply.</p>
1932      * <p>When this mode is set, a color effect will be applied
1933      * to images produced by the camera device. The interpretation
1934      * and implementation of these color effects is left to the
1935      * implementor of the camera device, and should not be
1936      * depended on to be consistent (or present) across all
1937      * devices.</p>
1938      * <p><b>Possible values:</b>
1939      * <ul>
1940      *   <li>{@link #CONTROL_EFFECT_MODE_OFF OFF}</li>
1941      *   <li>{@link #CONTROL_EFFECT_MODE_MONO MONO}</li>
1942      *   <li>{@link #CONTROL_EFFECT_MODE_NEGATIVE NEGATIVE}</li>
1943      *   <li>{@link #CONTROL_EFFECT_MODE_SOLARIZE SOLARIZE}</li>
1944      *   <li>{@link #CONTROL_EFFECT_MODE_SEPIA SEPIA}</li>
1945      *   <li>{@link #CONTROL_EFFECT_MODE_POSTERIZE POSTERIZE}</li>
1946      *   <li>{@link #CONTROL_EFFECT_MODE_WHITEBOARD WHITEBOARD}</li>
1947      *   <li>{@link #CONTROL_EFFECT_MODE_BLACKBOARD BLACKBOARD}</li>
1948      *   <li>{@link #CONTROL_EFFECT_MODE_AQUA AQUA}</li>
1949      * </ul></p>
1950      * <p><b>Available values for this device:</b><br>
1951      * {@link CameraCharacteristics#CONTROL_AVAILABLE_EFFECTS android.control.availableEffects}</p>
1952      * <p>This key is available on all devices.</p>
1953      *
1954      * @see CameraCharacteristics#CONTROL_AVAILABLE_EFFECTS
1955      * @see #CONTROL_EFFECT_MODE_OFF
1956      * @see #CONTROL_EFFECT_MODE_MONO
1957      * @see #CONTROL_EFFECT_MODE_NEGATIVE
1958      * @see #CONTROL_EFFECT_MODE_SOLARIZE
1959      * @see #CONTROL_EFFECT_MODE_SEPIA
1960      * @see #CONTROL_EFFECT_MODE_POSTERIZE
1961      * @see #CONTROL_EFFECT_MODE_WHITEBOARD
1962      * @see #CONTROL_EFFECT_MODE_BLACKBOARD
1963      * @see #CONTROL_EFFECT_MODE_AQUA
1964      */
1965     @PublicKey
1966     public static final Key<Integer> CONTROL_EFFECT_MODE =
1967             new Key<Integer>("android.control.effectMode", int.class);
1968 
1969     /**
1970      * <p>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control
1971      * routines.</p>
1972      * <p>This is a top-level 3A control switch. When set to OFF, all 3A control
1973      * by the camera device is disabled. The application must set the fields for
1974      * capture parameters itself.</p>
1975      * <p>When set to AUTO, the individual algorithm controls in
1976      * android.control.* are in effect, such as {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}.</p>
1977      * <p>When set to USE_SCENE_MODE, the individual controls in
1978      * android.control.* are mostly disabled, and the camera device implements
1979      * one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
1980      * as it wishes. The camera device scene mode 3A settings are provided by
1981      * {@link android.hardware.camera2.CaptureResult capture results}.</p>
1982      * <p>When set to OFF_KEEP_STATE, the behavior is similar to OFF mode; the only difference
1983      * is that this frame will not be used by the camera device's background 3A statistics
1984      * update, as if this frame were never captured. This mode can be used when
1985      * the application doesn't want a manual 3A control capture to affect
1986      * the subsequent automatic 3A capture results.</p>
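     * <p>A hedged sketch of a fully manual exposure request, assuming an app-side
     * <code>manualBuilder</code> and a device that advertises MANUAL_SENSOR; the exposure
     * values are arbitrary examples:</p>
     * <code><pre>
     * // 'manualBuilder' is an app-side CaptureRequest.Builder (assumed); values are examples.
     * manualBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_OFF);
     * manualBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 400);
     * manualBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10000000L); // 10 ms in ns
     * </pre></code>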
1987      * <p><b>Possible values:</b>
1988      * <ul>
1989      *   <li>{@link #CONTROL_MODE_OFF OFF}</li>
1990      *   <li>{@link #CONTROL_MODE_AUTO AUTO}</li>
1991      *   <li>{@link #CONTROL_MODE_USE_SCENE_MODE USE_SCENE_MODE}</li>
1992      *   <li>{@link #CONTROL_MODE_OFF_KEEP_STATE OFF_KEEP_STATE}</li>
1993      * </ul></p>
1994      * <p><b>Available values for this device:</b><br>
1995      * {@link CameraCharacteristics#CONTROL_AVAILABLE_MODES android.control.availableModes}</p>
1996      * <p>This key is available on all devices.</p>
1997      *
1998      * @see CaptureRequest#CONTROL_AF_MODE
1999      * @see CameraCharacteristics#CONTROL_AVAILABLE_MODES
2000      * @see #CONTROL_MODE_OFF
2001      * @see #CONTROL_MODE_AUTO
2002      * @see #CONTROL_MODE_USE_SCENE_MODE
2003      * @see #CONTROL_MODE_OFF_KEEP_STATE
2004      */
2005     @PublicKey
2006     public static final Key<Integer> CONTROL_MODE =
2007             new Key<Integer>("android.control.mode", int.class);
2008 
2009     /**
2010      * <p>Control for which scene mode is currently active.</p>
2011      * <p>Scene modes are custom camera modes optimized for a certain set of conditions and
2012      * capture settings.</p>
2013      * <p>This is the mode that is active when
2014      * <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE</code>. Aside from FACE_PRIORITY, these modes will
2015      * disable {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}, {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, and {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}
2016      * while in use.</p>
2017      * <p>The interpretation and implementation of these scene modes is left
2018      * to the implementor of the camera device. Their behavior will not be
2019      * consistent across all devices, and any given device may only implement
2020      * a subset of these modes.</p>
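     * <p>For illustration, a scene mode only takes effect together with USE_SCENE_MODE and
     * should be checked against the advertised list first; <code>characteristics</code> and
     * <code>previewBuilder</code> are app-side assumptions:</p>
     * <code><pre>
     * // 'characteristics' and 'previewBuilder' are app-side objects (assumed).
     * int[] sceneModes = characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES);
     * for (int mode : sceneModes) {
     *     if (mode == CaptureRequest.CONTROL_SCENE_MODE_NIGHT) {
     *         previewBuilder.set(CaptureRequest.CONTROL_MODE,
     *                 CaptureRequest.CONTROL_MODE_USE_SCENE_MODE);
     *         previewBuilder.set(CaptureRequest.CONTROL_SCENE_MODE,
     *                 CaptureRequest.CONTROL_SCENE_MODE_NIGHT);
     *     }
     * }
     * </pre></code>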
2021      * <p><b>Possible values:</b>
2022      * <ul>
2023      *   <li>{@link #CONTROL_SCENE_MODE_DISABLED DISABLED}</li>
2024      *   <li>{@link #CONTROL_SCENE_MODE_FACE_PRIORITY FACE_PRIORITY}</li>
2025      *   <li>{@link #CONTROL_SCENE_MODE_ACTION ACTION}</li>
2026      *   <li>{@link #CONTROL_SCENE_MODE_PORTRAIT PORTRAIT}</li>
2027      *   <li>{@link #CONTROL_SCENE_MODE_LANDSCAPE LANDSCAPE}</li>
2028      *   <li>{@link #CONTROL_SCENE_MODE_NIGHT NIGHT}</li>
2029      *   <li>{@link #CONTROL_SCENE_MODE_NIGHT_PORTRAIT NIGHT_PORTRAIT}</li>
2030      *   <li>{@link #CONTROL_SCENE_MODE_THEATRE THEATRE}</li>
2031      *   <li>{@link #CONTROL_SCENE_MODE_BEACH BEACH}</li>
2032      *   <li>{@link #CONTROL_SCENE_MODE_SNOW SNOW}</li>
2033      *   <li>{@link #CONTROL_SCENE_MODE_SUNSET SUNSET}</li>
2034      *   <li>{@link #CONTROL_SCENE_MODE_STEADYPHOTO STEADYPHOTO}</li>
2035      *   <li>{@link #CONTROL_SCENE_MODE_FIREWORKS FIREWORKS}</li>
2036      *   <li>{@link #CONTROL_SCENE_MODE_SPORTS SPORTS}</li>
2037      *   <li>{@link #CONTROL_SCENE_MODE_PARTY PARTY}</li>
2038      *   <li>{@link #CONTROL_SCENE_MODE_CANDLELIGHT CANDLELIGHT}</li>
2039      *   <li>{@link #CONTROL_SCENE_MODE_BARCODE BARCODE}</li>
2040      *   <li>{@link #CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO HIGH_SPEED_VIDEO}</li>
2041      *   <li>{@link #CONTROL_SCENE_MODE_HDR HDR}</li>
2042      * </ul></p>
2043      * <p><b>Available values for this device:</b><br>
2044      * {@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES android.control.availableSceneModes}</p>
2045      * <p>This key is available on all devices.</p>
2046      *
2047      * @see CaptureRequest#CONTROL_AE_MODE
2048      * @see CaptureRequest#CONTROL_AF_MODE
2049      * @see CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES
2050      * @see CaptureRequest#CONTROL_AWB_MODE
2051      * @see CaptureRequest#CONTROL_MODE
2052      * @see #CONTROL_SCENE_MODE_DISABLED
2053      * @see #CONTROL_SCENE_MODE_FACE_PRIORITY
2054      * @see #CONTROL_SCENE_MODE_ACTION
2055      * @see #CONTROL_SCENE_MODE_PORTRAIT
2056      * @see #CONTROL_SCENE_MODE_LANDSCAPE
2057      * @see #CONTROL_SCENE_MODE_NIGHT
2058      * @see #CONTROL_SCENE_MODE_NIGHT_PORTRAIT
2059      * @see #CONTROL_SCENE_MODE_THEATRE
2060      * @see #CONTROL_SCENE_MODE_BEACH
2061      * @see #CONTROL_SCENE_MODE_SNOW
2062      * @see #CONTROL_SCENE_MODE_SUNSET
2063      * @see #CONTROL_SCENE_MODE_STEADYPHOTO
2064      * @see #CONTROL_SCENE_MODE_FIREWORKS
2065      * @see #CONTROL_SCENE_MODE_SPORTS
2066      * @see #CONTROL_SCENE_MODE_PARTY
2067      * @see #CONTROL_SCENE_MODE_CANDLELIGHT
2068      * @see #CONTROL_SCENE_MODE_BARCODE
2069      * @see #CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO
2070      * @see #CONTROL_SCENE_MODE_HDR
2071      */
2072     @PublicKey
2073     public static final Key<Integer> CONTROL_SCENE_MODE =
2074             new Key<Integer>("android.control.sceneMode", int.class);
2075 
2076     /**
2077      * <p>Whether video stabilization is
2078      * active.</p>
2079      * <p>Video stabilization automatically warps images from
2080      * the camera in order to stabilize motion between consecutive frames.</p>
2081      * <p>If enabled, video stabilization can modify the
2082      * {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} to keep the video stream stabilized.</p>
2083      * <p>Switching between different video stabilization modes may take several
2084      * frames to initialize; the camera device will report the current mode
2085      * in capture result metadata. For example, when "ON" mode is requested,
2086      * the video stabilization mode in the first several capture results may
2087      * still be "OFF", and it will become "ON" once the initialization is
2088      * done.</p>
2089      * <p>In addition, not all recording sizes or frame rates may be supported for
2090      * stabilization by a device that reports stabilization support. It is guaranteed
2091      * that an output targeting a MediaRecorder or MediaCodec will be stabilized if
2092      * the recording resolution is less than or equal to 1920 x 1080 (width less than
2093      * or equal to 1920, height less than or equal to 1080), and the recording
2094      * frame rate is less than or equal to 30fps.  At other sizes, the CaptureResult
2095      * {@link CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE android.control.videoStabilizationMode} field will return
2096      * OFF if the recording output is not stabilized, or if there are no output
2097      * Surface types that can be stabilized.</p>
2098      * <p>If a camera device supports both this mode and OIS
2099      * ({@link CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE android.lens.opticalStabilizationMode}), turning both modes on may
2100      * produce undesirable interaction, so it is recommended not to enable
2101      * both at the same time.</p>
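     * <p>A brief sketch: request stabilization on a recording request and verify the mode
     * actually applied from the result; <code>recordBuilder</code> and <code>result</code>
     * are app-side assumptions:</p>
     * <code><pre>
     * // 'recordBuilder' and 'result' are app-side objects (assumed).
     * recordBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
     *         CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON);
     * // Later, in the capture callback:
     * Integer eis = result.get(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE);
     * boolean stabilized = Integer.valueOf(
     *         CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE_ON).equals(eis);
     * </pre></code>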
2102      * <p><b>Possible values:</b>
2103      * <ul>
2104      *   <li>{@link #CONTROL_VIDEO_STABILIZATION_MODE_OFF OFF}</li>
2105      *   <li>{@link #CONTROL_VIDEO_STABILIZATION_MODE_ON ON}</li>
2106      * </ul></p>
2107      * <p>This key is available on all devices.</p>
2108      *
2109      * @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE
2110      * @see CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE
2111      * @see CaptureRequest#SCALER_CROP_REGION
2112      * @see #CONTROL_VIDEO_STABILIZATION_MODE_OFF
2113      * @see #CONTROL_VIDEO_STABILIZATION_MODE_ON
2114      */
2115     @PublicKey
2116     public static final Key<Integer> CONTROL_VIDEO_STABILIZATION_MODE =
2117             new Key<Integer>("android.control.videoStabilizationMode", int.class);
2118 
2119     /**
2120      * <p>The amount of additional sensitivity boost applied to output images
2121      * after RAW sensor data is captured.</p>
2122      * <p>Some camera devices support additional digital sensitivity boosting in the
2123      * camera processing pipeline after sensor RAW image is captured.
     * Such a boost will be applied to YUV/JPEG format output images but will have
     * no effect on RAW output formats like RAW_SENSOR, RAW10, RAW12, or RAW_OPAQUE.</p>
     * <p>This key will be <code>null</code> for devices that do not support any RAW format
     * outputs. For devices that do support RAW format outputs, this key will always be
     * present, and if a device does not support post RAW sensitivity boost, it will
     * list <code>100</code> in this key.</p>
2130      * <p>If the camera device cannot apply the exact boost requested, it will reduce the
2131      * boost to the nearest supported value.
2132      * The final boost value used will be available in the output capture result.</p>
2133      * <p>For devices that support post RAW sensitivity boost, the YUV/JPEG output images
2134      * of such device will have the total sensitivity of
     * <code>{@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity} * {@link CaptureRequest#CONTROL_POST_RAW_SENSITIVITY_BOOST android.control.postRawSensitivityBoost} / 100</code>.
     * The sensitivity of RAW format images will always be <code>{@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}</code>.</p>
2137      * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to
2138      * OFF; otherwise the auto-exposure algorithm will override this value.</p>
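     * <p>As a minimal sketch (assuming a {@link CaptureResult} named {@code result}; the
     * name is a placeholder), the effective sensitivity of the YUV/JPEG outputs can be
     * derived as follows:</p>
     * <pre><code>// Values may be null on devices without RAW support
     * Integer sensitivity = result.get(CaptureResult.SENSOR_SENSITIVITY);
     * Integer boost = result.get(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST);
     * if (sensitivity != null) {
     *     int appliedBoost = (boost != null) ? boost : 100; // 100 means no boost
     *     int totalSensitivity = sensitivity * appliedBoost / 100;
     * }
     * </code></pre>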
2139      * <p><b>Units</b>: ISO arithmetic units, the same as {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}</p>
2140      * <p><b>Range of valid values:</b><br>
2141      * {@link CameraCharacteristics#CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE android.control.postRawSensitivityBoostRange}</p>
2142      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
2143      *
2144      * @see CaptureRequest#CONTROL_AE_MODE
2145      * @see CaptureRequest#CONTROL_MODE
2146      * @see CaptureRequest#CONTROL_POST_RAW_SENSITIVITY_BOOST
2147      * @see CameraCharacteristics#CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE
2148      * @see CaptureRequest#SENSOR_SENSITIVITY
2149      */
2150     @PublicKey
2151     public static final Key<Integer> CONTROL_POST_RAW_SENSITIVITY_BOOST =
2152             new Key<Integer>("android.control.postRawSensitivityBoost", int.class);
2153 
2154     /**
2155      * <p>Allow camera device to enable zero-shutter-lag mode for requests with
2156      * {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} == STILL_CAPTURE.</p>
2157      * <p>If enableZsl is <code>true</code>, the camera device may enable zero-shutter-lag mode for requests with
2158      * STILL_CAPTURE capture intent. The camera device may use images captured in the past to
2159      * produce output images for a zero-shutter-lag request. The result metadata including the
2160      * {@link CaptureResult#SENSOR_TIMESTAMP android.sensor.timestamp} reflects the source frames used to produce output images.
2161      * Therefore, the contents of the output images and the result metadata may be out of order
2162      * compared to previous regular requests. enableZsl does not affect requests with other
2163      * capture intents.</p>
2164      * <p>For example, when requests are submitted in the following order:
2165      *   Request A: enableZsl is ON, {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} is PREVIEW
2166      *   Request B: enableZsl is ON, {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} is STILL_CAPTURE</p>
2167      * <p>The output images for request B may have contents captured before the output images for
2168      * request A, and the result metadata for request B may be older than the result metadata for
2169      * request A.</p>
2170      * <p>Note that when enableZsl is <code>true</code>, it is not guaranteed to get output images captured in
2171      * the past for requests with STILL_CAPTURE capture intent.</p>
2172      * <p>For applications targeting SDK versions O and newer, the value of enableZsl in
2173      * TEMPLATE_STILL_CAPTURE template may be <code>true</code>. The value in other templates is always
2174      * <code>false</code> if present.</p>
2175      * <p>For applications targeting SDK versions older than O, the value of enableZsl in all
2176      * capture templates is always <code>false</code> if present.</p>
2177      * <p>For application-operated ZSL, use CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.</p>
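     * <p>For illustration only (assuming a {@link CaptureRequest.Builder} named
     * {@code stillBuilder} created from a still-capture template; the name is hypothetical),
     * ZSL can be requested explicitly as follows:</p>
     * <pre><code>// enableZsl is ignored for requests with other capture intents
     * stillBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
     *         CameraMetadata.CONTROL_CAPTURE_INTENT_STILL_CAPTURE);
     * stillBuilder.set(CaptureRequest.CONTROL_ENABLE_ZSL, true);
     * </code></pre>
     * <p>With ZSL enabled, {@link CaptureResult#SENSOR_TIMESTAMP android.sensor.timestamp} in the
     * still-capture result may be older than the timestamps of surrounding preview results.</p>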
2178      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
2179      *
2180      * @see CaptureRequest#CONTROL_CAPTURE_INTENT
2181      * @see CaptureResult#SENSOR_TIMESTAMP
2182      */
2183     @PublicKey
2184     public static final Key<Boolean> CONTROL_ENABLE_ZSL =
2185             new Key<Boolean>("android.control.enableZsl", boolean.class);
2186 
2187     /**
2188      * <p>Operation mode for edge
2189      * enhancement.</p>
2190      * <p>Edge enhancement improves sharpness and details in the captured image. OFF means
2191      * no enhancement will be applied by the camera device.</p>
2192      * <p>FAST/HIGH_QUALITY both mean camera device determined enhancement
2193      * will be applied. HIGH_QUALITY mode indicates that the
2194      * camera device will use the highest-quality enhancement algorithms,
2195      * even if it slows down capture rate. FAST means the camera device will
2196      * not slow down capture rate when applying edge enhancement. FAST may be the same as OFF if
2197      * edge enhancement will slow down capture rate. Every output stream will have a similar
2198      * amount of enhancement applied.</p>
2199      * <p>ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
2200      * buffer of high-resolution images during preview and reprocess image(s) from that buffer
2201      * into a final capture when triggered by the user. In this mode, the camera device applies
2202      * edge enhancement to low-resolution streams (below maximum recording resolution) to
2203      * maximize preview quality, but does not apply edge enhancement to high-resolution streams,
2204      * since those will be reprocessed later if necessary.</p>
2205      * <p>For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera
2206      * device will apply FAST/HIGH_QUALITY YUV-domain edge enhancement, respectively.
2207      * The camera device may adjust its internal edge enhancement parameters for best
2208      * image quality based on the {@link CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR android.reprocess.effectiveExposureFactor}, if it is set.</p>
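     * <p>A minimal sketch of selecting a supported mode (assuming a {@link CameraCharacteristics}
     * instance named {@code characteristics} and a {@link CaptureRequest.Builder} named
     * {@code builder}; both names are illustrative):</p>
     * <pre><code>int[] modes = characteristics.get(CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES);
     * if (modes != null) {
     *     int chosen = modes[0];
     *     for (int mode : modes) {
     *         if (mode == CameraMetadata.EDGE_MODE_HIGH_QUALITY) {
     *             chosen = mode; // prefer HIGH_QUALITY if the device lists it
     *         }
     *     }
     *     builder.set(CaptureRequest.EDGE_MODE, chosen);
     * }
     * </code></pre>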
2209      * <p><b>Possible values:</b>
2210      * <ul>
2211      *   <li>{@link #EDGE_MODE_OFF OFF}</li>
2212      *   <li>{@link #EDGE_MODE_FAST FAST}</li>
2213      *   <li>{@link #EDGE_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
2214      *   <li>{@link #EDGE_MODE_ZERO_SHUTTER_LAG ZERO_SHUTTER_LAG}</li>
2215      * </ul></p>
2216      * <p><b>Available values for this device:</b><br>
2217      * {@link CameraCharacteristics#EDGE_AVAILABLE_EDGE_MODES android.edge.availableEdgeModes}</p>
2218      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
2219      * <p><b>Full capability</b> -
2220      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
2221      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
2222      *
2223      * @see CameraCharacteristics#EDGE_AVAILABLE_EDGE_MODES
2224      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
2225      * @see CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR
2226      * @see #EDGE_MODE_OFF
2227      * @see #EDGE_MODE_FAST
2228      * @see #EDGE_MODE_HIGH_QUALITY
2229      * @see #EDGE_MODE_ZERO_SHUTTER_LAG
2230      */
2231     @PublicKey
2232     public static final Key<Integer> EDGE_MODE =
2233             new Key<Integer>("android.edge.mode", int.class);
2234 
2235     /**
     * <p>The desired mode for the camera device's flash control.</p>
     * <p>This control is only effective when a flash unit is available
2238      * (<code>{@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} == true</code>).</p>
2239      * <p>When this control is used, the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} must be set to ON or OFF.
2240      * Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH,
2241      * ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control.</p>
2242      * <p>When set to OFF, the camera device will not fire flash for this capture.</p>
2243      * <p>When set to SINGLE, the camera device will fire flash regardless of the camera
2244      * device's auto-exposure routine's result. When used in still capture case, this
2245      * control should be used along with auto-exposure (AE) precapture metering sequence
2246      * ({@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}), otherwise, the image may be incorrectly exposed.</p>
2247      * <p>When set to TORCH, the flash will be on continuously. This mode can be used
2248      * for use cases such as preview, auto-focus assist, still capture, or video recording.</p>
2249      * <p>The flash status will be reported by {@link CaptureResult#FLASH_STATE android.flash.state} in the capture result metadata.</p>
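     * <p>For illustration (assuming a {@link CaptureRequest.Builder} named {@code builder};
     * the name is hypothetical), a continuous torch for preview could be requested with:</p>
     * <pre><code>// AE must be ON or OFF for FLASH_MODE to take effect
     * builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
     * builder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH);
     * </code></pre>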
2250      * <p><b>Possible values:</b>
2251      * <ul>
2252      *   <li>{@link #FLASH_MODE_OFF OFF}</li>
2253      *   <li>{@link #FLASH_MODE_SINGLE SINGLE}</li>
2254      *   <li>{@link #FLASH_MODE_TORCH TORCH}</li>
2255      * </ul></p>
2256      * <p>This key is available on all devices.</p>
2257      *
2258      * @see CaptureRequest#CONTROL_AE_MODE
2259      * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
2260      * @see CameraCharacteristics#FLASH_INFO_AVAILABLE
2261      * @see CaptureResult#FLASH_STATE
2262      * @see #FLASH_MODE_OFF
2263      * @see #FLASH_MODE_SINGLE
2264      * @see #FLASH_MODE_TORCH
2265      */
2266     @PublicKey
2267     public static final Key<Integer> FLASH_MODE =
2268             new Key<Integer>("android.flash.mode", int.class);
2269 
2270     /**
2271      * <p>Current state of the flash
2272      * unit.</p>
     * <p>When the camera device doesn't have a flash unit
2274      * (i.e. <code>{@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} == false</code>), this state will always be UNAVAILABLE.
2275      * Other states indicate the current flash status.</p>
2276      * <p>In certain conditions, this will be available on LEGACY devices:</p>
2277      * <ul>
2278      * <li>Flash-less cameras always return UNAVAILABLE.</li>
2279      * <li>Using {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} <code>==</code> ON_ALWAYS_FLASH
2280      *    will always return FIRED.</li>
2281      * <li>Using {@link CaptureRequest#FLASH_MODE android.flash.mode} <code>==</code> TORCH
2282      *    will always return FIRED.</li>
2283      * </ul>
2284      * <p>In all other conditions the state will not be available on
2285      * LEGACY devices (i.e. it will be <code>null</code>).</p>
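     * <p>A minimal sketch of checking whether the flash fired for a frame (assuming a
     * {@link TotalCaptureResult} named {@code result}; the name is illustrative):</p>
     * <pre><code>Integer flashState = result.get(CaptureResult.FLASH_STATE);
     * boolean flashFired = false;
     * if (flashState != null) { // null on LEGACY devices in most conditions
     *     flashFired = (flashState == CameraMetadata.FLASH_STATE_FIRED);
     * }
     * </code></pre>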
2286      * <p><b>Possible values:</b>
2287      * <ul>
2288      *   <li>{@link #FLASH_STATE_UNAVAILABLE UNAVAILABLE}</li>
2289      *   <li>{@link #FLASH_STATE_CHARGING CHARGING}</li>
2290      *   <li>{@link #FLASH_STATE_READY READY}</li>
2291      *   <li>{@link #FLASH_STATE_FIRED FIRED}</li>
2292      *   <li>{@link #FLASH_STATE_PARTIAL PARTIAL}</li>
2293      * </ul></p>
2294      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
2295      * <p><b>Limited capability</b> -
2296      * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
2297      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
2298      *
2299      * @see CaptureRequest#CONTROL_AE_MODE
2300      * @see CameraCharacteristics#FLASH_INFO_AVAILABLE
2301      * @see CaptureRequest#FLASH_MODE
2302      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
2303      * @see #FLASH_STATE_UNAVAILABLE
2304      * @see #FLASH_STATE_CHARGING
2305      * @see #FLASH_STATE_READY
2306      * @see #FLASH_STATE_FIRED
2307      * @see #FLASH_STATE_PARTIAL
2308      */
2309     @PublicKey
2310     public static final Key<Integer> FLASH_STATE =
2311             new Key<Integer>("android.flash.state", int.class);
2312 
2313     /**
2314      * <p>Operational mode for hot pixel correction.</p>
     * <p>Hot pixel correction interpolates out, or otherwise removes, pixels
2316      * that do not accurately measure the incoming light (i.e. pixels that
2317      * are stuck at an arbitrary value or are oversensitive).</p>
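     * <p>As a sketch (assuming a {@link CameraCharacteristics} instance named
     * {@code characteristics} and a {@link CaptureRequest.Builder} named {@code builder};
     * both names are illustrative), availability should be checked before requesting a mode:</p>
     * <pre><code>int[] modes = characteristics.get(
     *         CameraCharacteristics.HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES);
     * if (modes != null) {
     *     for (int mode : modes) {
     *         if (mode == CameraMetadata.HOT_PIXEL_MODE_HIGH_QUALITY) {
     *             builder.set(CaptureRequest.HOT_PIXEL_MODE, mode);
     *         }
     *     }
     * }
     * </code></pre>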
2318      * <p><b>Possible values:</b>
2319      * <ul>
2320      *   <li>{@link #HOT_PIXEL_MODE_OFF OFF}</li>
2321      *   <li>{@link #HOT_PIXEL_MODE_FAST FAST}</li>
2322      *   <li>{@link #HOT_PIXEL_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
2323      * </ul></p>
2324      * <p><b>Available values for this device:</b><br>
2325      * {@link CameraCharacteristics#HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES android.hotPixel.availableHotPixelModes}</p>
2326      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
2327      *
2328      * @see CameraCharacteristics#HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES
2329      * @see #HOT_PIXEL_MODE_OFF
2330      * @see #HOT_PIXEL_MODE_FAST
2331      * @see #HOT_PIXEL_MODE_HIGH_QUALITY
2332      */
2333     @PublicKey
2334     public static final Key<Integer> HOT_PIXEL_MODE =
2335             new Key<Integer>("android.hotPixel.mode", int.class);
2336 
2337     /**
2338      * <p>A location object to use when generating image GPS metadata.</p>
2339      * <p>Setting a location object in a request will include the GPS coordinates of the location
2340      * into any JPEG images captured based on the request. These coordinates can then be
2341      * viewed by anyone who receives the JPEG image.</p>
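     * <p>For illustration (assuming a {@link CaptureRequest.Builder} named {@code builder}
     * and an {@link android.location.Location} named {@code location} obtained from a
     * location provider; both names are hypothetical):</p>
     * <pre><code>builder.set(CaptureRequest.JPEG_GPS_LOCATION, location);
     * </code></pre>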
2342      * <p>This key is available on all devices.</p>
2343      */
2344     @PublicKey
2345     @SyntheticKey
2346     public static final Key<android.location.Location> JPEG_GPS_LOCATION =
2347             new Key<android.location.Location>("android.jpeg.gpsLocation", android.location.Location.class);
2348 
2349     /**
2350      * <p>GPS coordinates to include in output JPEG
2351      * EXIF.</p>
2352      * <p><b>Range of valid values:</b><br>
     * (-180, 180], [-90, 90], [-inf, inf]</p>
2354      * <p>This key is available on all devices.</p>
2355      * @hide
2356      */
2357     public static final Key<double[]> JPEG_GPS_COORDINATES =
2358             new Key<double[]>("android.jpeg.gpsCoordinates", double[].class);
2359 
2360     /**
2361      * <p>32 characters describing GPS algorithm to
2362      * include in EXIF.</p>
2363      * <p><b>Units</b>: UTF-8 null-terminated string</p>
2364      * <p>This key is available on all devices.</p>
2365      * @hide
2366      */
2367     public static final Key<String> JPEG_GPS_PROCESSING_METHOD =
2368             new Key<String>("android.jpeg.gpsProcessingMethod", String.class);
2369 
2370     /**
2371      * <p>Time GPS fix was made to include in
2372      * EXIF.</p>
2373      * <p><b>Units</b>: UTC in seconds since January 1, 1970</p>
2374      * <p>This key is available on all devices.</p>
2375      * @hide
2376      */
2377     public static final Key<Long> JPEG_GPS_TIMESTAMP =
2378             new Key<Long>("android.jpeg.gpsTimestamp", long.class);
2379 
2380     /**
2381      * <p>The orientation for a JPEG image.</p>
     * <p>The clockwise rotation angle in degrees, relative to the orientation
     * of the camera, that the JPEG picture needs to be rotated by to be viewed
2384      * upright.</p>
2385      * <p>Camera devices may either encode this value into the JPEG EXIF header, or
2386      * rotate the image data to match this orientation. When the image data is rotated,
2387      * the thumbnail data will also be rotated.</p>
2388      * <p>Note that this orientation is relative to the orientation of the camera sensor, given
2389      * by {@link CameraCharacteristics#SENSOR_ORIENTATION android.sensor.orientation}.</p>
2390      * <p>To translate from the device orientation given by the Android sensor APIs, the following
2391      * sample code may be used:</p>
2392      * <pre><code>private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
2393      *     if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
2394      *     int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
2395      *
2396      *     // Round device orientation to a multiple of 90
2397      *     deviceOrientation = (deviceOrientation + 45) / 90 * 90;
2398      *
2399      *     // Reverse device orientation for front-facing cameras
2400      *     boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
2401      *     if (facingFront) deviceOrientation = -deviceOrientation;
2402      *
2403      *     // Calculate desired JPEG orientation relative to camera orientation to make
2404      *     // the image upright relative to the device orientation
2405      *     int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
2406      *
2407      *     return jpegOrientation;
2408      * }
2409      * </code></pre>
2410      * <p><b>Units</b>: Degrees in multiples of 90</p>
2411      * <p><b>Range of valid values:</b><br>
2412      * 0, 90, 180, 270</p>
2413      * <p>This key is available on all devices.</p>
2414      *
2415      * @see CameraCharacteristics#SENSOR_ORIENTATION
2416      */
2417     @PublicKey
2418     public static final Key<Integer> JPEG_ORIENTATION =
2419             new Key<Integer>("android.jpeg.orientation", int.class);
2420 
2421     /**
2422      * <p>Compression quality of the final JPEG
2423      * image.</p>
2424      * <p>85-95 is typical usage range.</p>
2425      * <p><b>Range of valid values:</b><br>
2426      * 1-100; larger is higher quality</p>
2427      * <p>This key is available on all devices.</p>
2428      */
2429     @PublicKey
2430     public static final Key<Byte> JPEG_QUALITY =
2431             new Key<Byte>("android.jpeg.quality", byte.class);
2432 
2433     /**
2434      * <p>Compression quality of JPEG
2435      * thumbnail.</p>
2436      * <p><b>Range of valid values:</b><br>
2437      * 1-100; larger is higher quality</p>
2438      * <p>This key is available on all devices.</p>
2439      */
2440     @PublicKey
2441     public static final Key<Byte> JPEG_THUMBNAIL_QUALITY =
2442             new Key<Byte>("android.jpeg.thumbnailQuality", byte.class);
2443 
2444     /**
2445      * <p>Resolution of embedded JPEG thumbnail.</p>
     * <p>When set to (0, 0), the JPEG EXIF will not contain a thumbnail,
2447      * but the captured JPEG will still be a valid image.</p>
2448      * <p>For best results, when issuing a request for a JPEG image, the thumbnail size selected
2449      * should have the same aspect ratio as the main JPEG output.</p>
2450      * <p>If the thumbnail image aspect ratio differs from the JPEG primary image aspect
2451      * ratio, the camera device creates the thumbnail by cropping it from the primary image.
     * For example, if the primary image has a 4:3 aspect ratio and the thumbnail image has
     * a 16:9 aspect ratio, the primary image will be cropped vertically (letterbox) to
2454      * generate the thumbnail image. The thumbnail image will always have a smaller Field
2455      * Of View (FOV) than the primary image when aspect ratios differ.</p>
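     * <p>A sketch of picking a thumbnail size whose aspect ratio matches a chosen JPEG
     * output size (assuming {@code characteristics}, {@code builder}, and a chosen JPEG
     * output {@link android.util.Size} named {@code jpegSize}; all names are illustrative):</p>
     * <pre><code>android.util.Size[] sizes = characteristics.get(
     *         CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES);
     * android.util.Size chosen = new android.util.Size(0, 0); // (0, 0) disables the thumbnail
     * for (android.util.Size s : sizes) {
     *     if (s.getWidth() * jpegSize.getHeight() == s.getHeight() * jpegSize.getWidth()) {
     *         chosen = s; // same aspect ratio as the main JPEG output
     *     }
     * }
     * builder.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, chosen);
     * </code></pre>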
2456      * <p>When an {@link CaptureRequest#JPEG_ORIENTATION android.jpeg.orientation} of non-zero degree is requested,
2457      * the camera device will handle thumbnail rotation in one of the following ways:</p>
2458      * <ul>
2459      * <li>Set the {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}
2460      *   and keep jpeg and thumbnail image data unrotated.</li>
2461      * <li>Rotate the jpeg and thumbnail image data and not set
2462      *   {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}. In this
2463      *   case, LIMITED or FULL hardware level devices will report rotated thumnail size in
2464      *   capture result, so the width and height will be interchanged if 90 or 270 degree
2465      *   orientation is requested. LEGACY device will always report unrotated thumbnail
2466      *   size.</li>
2467      * </ul>
2468      * <p><b>Range of valid values:</b><br>
2469      * {@link CameraCharacteristics#JPEG_AVAILABLE_THUMBNAIL_SIZES android.jpeg.availableThumbnailSizes}</p>
2470      * <p>This key is available on all devices.</p>
2471      *
2472      * @see CameraCharacteristics#JPEG_AVAILABLE_THUMBNAIL_SIZES
2473      * @see CaptureRequest#JPEG_ORIENTATION
2474      */
2475     @PublicKey
2476     public static final Key<android.util.Size> JPEG_THUMBNAIL_SIZE =
2477             new Key<android.util.Size>("android.jpeg.thumbnailSize", android.util.Size.class);
2478 
2479     /**
2480      * <p>The desired lens aperture size, as a ratio of lens focal length to the
2481      * effective aperture diameter.</p>
2482      * <p>Setting this value is only supported on the camera devices that have a variable
2483      * aperture lens.</p>
2484      * <p>When this is supported and {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is OFF,
2485      * this can be set along with {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime},
2486      * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}
2487      * to achieve manual exposure control.</p>
2488      * <p>The requested aperture value may take several frames to reach the
2489      * requested value; the camera device will report the current (intermediate)
2490      * aperture size in capture result metadata while the aperture is changing.
2491      * While the aperture is still changing, {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.</p>
2492      * <p>When this is supported and {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is one of
2493      * the ON modes, this will be overridden by the camera device
     * auto-exposure algorithm; the overridden values are then provided
2495      * back to the user in the corresponding result.</p>
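     * <p>A minimal manual-exposure sketch (assuming a {@link CaptureRequest.Builder} named
     * {@code builder}; the specific values are placeholders and must come from the ranges
     * advertised by the device):</p>
     * <pre><code>builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
     * builder.set(CaptureRequest.LENS_APERTURE, 2.0f);             // f/2.0, if listed as available
     * builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10000000L); // 10 ms, in nanoseconds
     * builder.set(CaptureRequest.SENSOR_SENSITIVITY, 200);         // ISO 200
     * </code></pre>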
2496      * <p><b>Units</b>: The f-number (f/N)</p>
2497      * <p><b>Range of valid values:</b><br>
2498      * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES android.lens.info.availableApertures}</p>
2499      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
2500      * <p><b>Full capability</b> -
2501      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
2502      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
2503      *
2504      * @see CaptureRequest#CONTROL_AE_MODE
2505      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
2506      * @see CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES
2507      * @see CaptureResult#LENS_STATE
2508      * @see CaptureRequest#SENSOR_EXPOSURE_TIME
2509      * @see CaptureRequest#SENSOR_FRAME_DURATION
2510      * @see CaptureRequest#SENSOR_SENSITIVITY
2511      */
2512     @PublicKey
2513     public static final Key<Float> LENS_APERTURE =
2514             new Key<Float>("android.lens.aperture", float.class);
2515 
2516     /**
2517      * <p>The desired setting for the lens neutral density filter(s).</p>
2518      * <p>This control will not be supported on most camera devices.</p>
2519      * <p>Lens filters are typically used to lower the amount of light the
2520      * sensor is exposed to (measured in steps of EV). As used here, an EV
     * step is the standard logarithmic representation, which is
2522      * non-negative, and inversely proportional to the amount of light
2523      * hitting the sensor.  For example, setting this to 0 would result
2524      * in no reduction of the incoming light, and setting this to 2 would
2525      * mean that the filter is set to reduce incoming light by two stops
2526      * (allowing 1/4 of the prior amount of light to the sensor).</p>
2527      * <p>It may take several frames before the lens filter density changes
2528      * to the requested value. While the filter density is still changing,
2529      * {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.</p>
2530      * <p><b>Units</b>: Exposure Value (EV)</p>
2531      * <p><b>Range of valid values:</b><br>
2532      * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES android.lens.info.availableFilterDensities}</p>
2533      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
2534      * <p><b>Full capability</b> -
2535      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
2536      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
2537      *
2538      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
2539      * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES
2540      * @see CaptureResult#LENS_STATE
2541      */
2542     @PublicKey
2543     public static final Key<Float> LENS_FILTER_DENSITY =
2544             new Key<Float>("android.lens.filterDensity", float.class);
2545 
2546     /**
2547      * <p>The desired lens focal length; used for optical zoom.</p>
2548      * <p>This setting controls the physical focal length of the camera
2549      * device's lens. Changing the focal length changes the field of
2550      * view of the camera device, and is usually used for optical zoom.</p>
2551      * <p>Like {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} and {@link CaptureRequest#LENS_APERTURE android.lens.aperture}, this
2552      * setting won't be applied instantaneously, and it may take several
2553      * frames before the lens can change to the requested focal length.
2554      * While the focal length is still changing, {@link CaptureResult#LENS_STATE android.lens.state} will
2555      * be set to MOVING.</p>
2556      * <p>Optical zoom will not be supported on most devices.</p>
2557      * <p><b>Units</b>: Millimeters</p>
2558      * <p><b>Range of valid values:</b><br>
2559      * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_FOCAL_LENGTHS android.lens.info.availableFocalLengths}</p>
2560      * <p>This key is available on all devices.</p>
2561      *
2562      * @see CaptureRequest#LENS_APERTURE
2563      * @see CaptureRequest#LENS_FOCUS_DISTANCE
2564      * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FOCAL_LENGTHS
2565      * @see CaptureResult#LENS_STATE
2566      */
2567     @PublicKey
2568     public static final Key<Float> LENS_FOCAL_LENGTH =
2569             new Key<Float>("android.lens.focalLength", float.class);
2570 
2571     /**
2572      * <p>Desired distance to plane of sharpest focus,
2573      * measured from frontmost surface of the lens.</p>
     * <p>Should be zero for fixed-focus cameras.</p>
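     * <p>A sketch of manual focus (assuming a {@link CaptureRequest.Builder} named
     * {@code builder}; the distance value is a placeholder in diopters):</p>
     * <pre><code>builder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_OFF);
     * builder.set(CaptureRequest.LENS_FOCUS_DISTANCE, 2.0f); // ~0.5 m if the device reports calibrated diopters
     * </code></pre>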
2575      * <p><b>Units</b>: See {@link CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION android.lens.info.focusDistanceCalibration} for details</p>
2576      * <p><b>Range of valid values:</b><br>
2577      * &gt;= 0</p>
2578      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
2579      * <p><b>Full capability</b> -
2580      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
2581      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
2582      *
2583      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
2584      * @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION
2585      */
2586     @PublicKey
2587     public static final Key<Float> LENS_FOCUS_DISTANCE =
2588             new Key<Float>("android.lens.focusDistance", float.class);
2589 
2590     /**
2591      * <p>The range of scene distances that are in
2592      * sharp focus (depth of field).</p>
     * <p>If variable focus is not supported, the camera device can still report a
     * fixed depth of field range.</p>
2595      * <p><b>Units</b>: A pair of focus distances in diopters: (near,
2596      * far); see {@link CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION android.lens.info.focusDistanceCalibration} for details.</p>
2597      * <p><b>Range of valid values:</b><br>
2598      * &gt;=0</p>
2599      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
2600      * <p><b>Limited capability</b> -
2601      * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
2602      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
2603      *
2604      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
2605      * @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION
2606      */
2607     @PublicKey
2608     public static final Key<android.util.Pair<Float,Float>> LENS_FOCUS_RANGE =
2609             new Key<android.util.Pair<Float,Float>>("android.lens.focusRange", new TypeReference<android.util.Pair<Float,Float>>() {{ }});
2610 
2611     /**
2612      * <p>Sets whether the camera device uses optical image stabilization (OIS)
2613      * when capturing images.</p>
2614      * <p>OIS is used to compensate for motion blur due to small
2615      * movements of the camera during capture. Unlike digital image
2616      * stabilization ({@link CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE android.control.videoStabilizationMode}), OIS
2617      * makes use of mechanical elements to stabilize the camera
2618      * sensor, and thus allows for longer exposure times before
2619      * camera shake becomes apparent.</p>
2620      * <p>Switching between different optical stabilization modes may take several
     * frames to initialize; the camera device will report the current mode in
     * capture result metadata. For example, when "ON" mode is requested, the
     * optical stabilization modes in the first several capture results may still
     * be "OFF", and they will become "ON" when the initialization is done.</p>
2625      * <p>If a camera device supports both OIS and digital image stabilization
2626      * ({@link CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE android.control.videoStabilizationMode}), turning both modes on may produce undesirable
2627      * interaction, so it is recommended not to enable both at the same time.</p>
2628      * <p>Not all devices will support OIS; see
2629      * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION android.lens.info.availableOpticalStabilization} for
2630      * available controls.</p>
2631      * <p><b>Possible values:</b>
2632      * <ul>
2633      *   <li>{@link #LENS_OPTICAL_STABILIZATION_MODE_OFF OFF}</li>
2634      *   <li>{@link #LENS_OPTICAL_STABILIZATION_MODE_ON ON}</li>
2635      * </ul></p>
2636      * <p><b>Available values for this device:</b><br>
2637      * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION android.lens.info.availableOpticalStabilization}</p>
2638      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
2639      * <p><b>Limited capability</b> -
2640      * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
2641      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
2642      *
2643      * @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE
2644      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
2645      * @see CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION
2646      * @see #LENS_OPTICAL_STABILIZATION_MODE_OFF
2647      * @see #LENS_OPTICAL_STABILIZATION_MODE_ON
2648      */
2649     @PublicKey
2650     public static final Key<Integer> LENS_OPTICAL_STABILIZATION_MODE =
2651             new Key<Integer>("android.lens.opticalStabilizationMode", int.class);
2652 
2653     /**
2654      * <p>Current lens status.</p>
2655      * <p>For lens parameters {@link CaptureRequest#LENS_FOCAL_LENGTH android.lens.focalLength}, {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance},
2656      * {@link CaptureRequest#LENS_FILTER_DENSITY android.lens.filterDensity} and {@link CaptureRequest#LENS_APERTURE android.lens.aperture}, when changes are requested,
2657      * they may take several frames to reach the requested values. This state indicates
2658      * the current status of the lens parameters.</p>
2659      * <p>When the state is STATIONARY, the lens parameters are not changing. This could be
2660      * either because the parameters are all fixed, or because the lens has had enough
2661      * time to reach the most recently-requested values.
     * If all of these lens parameters are not changeable for a camera device, as listed below:</p>
2663      * <ul>
2664      * <li>Fixed focus (<code>{@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance} == 0</code>), which means
2665      * {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} parameter will always be 0.</li>
2666      * <li>Fixed focal length ({@link CameraCharacteristics#LENS_INFO_AVAILABLE_FOCAL_LENGTHS android.lens.info.availableFocalLengths} contains single value),
2667      * which means the optical zoom is not supported.</li>
2668      * <li>No ND filter ({@link CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES android.lens.info.availableFilterDensities} contains only 0).</li>
2669      * <li>Fixed aperture ({@link CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES android.lens.info.availableApertures} contains single value).</li>
2670      * </ul>
2671      * <p>Then this state will always be STATIONARY.</p>
2672      * <p>When the state is MOVING, it indicates that at least one of the lens parameters
2673      * is changing.</p>
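     * <p>A sketch of waiting for the lens to settle after a manual focus change (assuming a
     * {@link CaptureResult} named {@code result} delivered per frame; the name is illustrative):</p>
     * <pre><code>Integer lensState = result.get(CaptureResult.LENS_STATE);
     * boolean lensSettled = (lensState == null)
     *         || (lensState == CameraMetadata.LENS_STATE_STATIONARY);
     * </code></pre>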
2674      * <p><b>Possible values:</b>
2675      * <ul>
2676      *   <li>{@link #LENS_STATE_STATIONARY STATIONARY}</li>
2677      *   <li>{@link #LENS_STATE_MOVING MOVING}</li>
2678      * </ul></p>
2679      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
2680      * <p><b>Limited capability</b> -
2681      * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
2682      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
2683      *
2684      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
2685      * @see CaptureRequest#LENS_APERTURE
2686      * @see CaptureRequest#LENS_FILTER_DENSITY
2687      * @see CaptureRequest#LENS_FOCAL_LENGTH
2688      * @see CaptureRequest#LENS_FOCUS_DISTANCE
2689      * @see CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES
2690      * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES
2691      * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FOCAL_LENGTHS
2692      * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
2693      * @see #LENS_STATE_STATIONARY
2694      * @see #LENS_STATE_MOVING
2695      */
2696     @PublicKey
2697     public static final Key<Integer> LENS_STATE =
2698             new Key<Integer>("android.lens.state", int.class);
2699 
2700     /**
2701      * <p>The orientation of the camera relative to the sensor
2702      * coordinate system.</p>
2703      * <p>The four coefficients that describe the quaternion
2704      * rotation from the Android sensor coordinate system to a
2705      * camera-aligned coordinate system where the X-axis is
2706      * aligned with the long side of the image sensor, the Y-axis
2707      * is aligned with the short side of the image sensor, and
2708      * the Z-axis is aligned with the optical axis of the sensor.</p>
2709      * <p>To convert from the quaternion coefficients <code>(x,y,z,w)</code>
2710      * to the axis of rotation <code>(a_x, a_y, a_z)</code> and rotation
2711      * amount <code>theta</code>, the following formulas can be used:</p>
2712      * <pre><code> theta = 2 * acos(w)
2713      * a_x = x / sin(theta/2)
2714      * a_y = y / sin(theta/2)
2715      * a_z = z / sin(theta/2)
2716      * </code></pre>
2717      * <p>To create a 3x3 rotation matrix that applies the rotation
2718      * defined by this quaternion, the following matrix can be
2719      * used:</p>
2720      * <pre><code>R = [ 1 - 2y^2 - 2z^2,       2xy - 2zw,       2xz + 2yw,
2721      *            2xy + 2zw, 1 - 2x^2 - 2z^2,       2yz - 2xw,
2722      *            2xz - 2yw,       2yz + 2xw, 1 - 2x^2 - 2y^2 ]
2723      * </code></pre>
2724      * <p>This matrix can then be used to apply the rotation to a
2725      *  column vector point with</p>
2726      * <p><code>p' = Rp</code></p>
2727      * <p>where <code>p</code> is in the device sensor coordinate system, and
2728      *  <code>p'</code> is in the camera-oriented coordinate system.</p>
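     * <p>A direct transcription of the matrix above (assuming a non-null {@code float[] q}
     * read via {@code result.get(CaptureResult.LENS_POSE_ROTATION)}, ordered as x, y, z, w):</p>
     * <pre><code>float x = q[0], y = q[1], z = q[2], w = q[3];
     * float[] r = {
     *     1 - 2*y*y - 2*z*z,     2*x*y - 2*z*w,     2*x*z + 2*y*w,
     *         2*x*y + 2*z*w, 1 - 2*x*x - 2*z*z,     2*y*z - 2*x*w,
     *         2*x*z - 2*y*w,     2*y*z + 2*x*w, 1 - 2*x*x - 2*y*y
     * }; // row-major 3x3 rotation matrix
     * </code></pre>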
2729      * <p><b>Units</b>:
2730      * Quaternion coefficients</p>
2731      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
2732      */
2733     @PublicKey
2734     public static final Key<float[]> LENS_POSE_ROTATION =
2735             new Key<float[]>("android.lens.poseRotation", float[].class);
2736 
2737     /**
2738      * <p>Position of the camera optical center.</p>
2739      * <p>The position of the camera device's lens optical center,
2740      * as a three-dimensional vector <code>(x,y,z)</code>, relative to the
2741      * optical center of the largest camera device facing in the
2742      * same direction as this camera, in the {@link android.hardware.SensorEvent Android sensor coordinate
2743      * axes}. Note that only the axis definitions are shared with
2744      * the sensor coordinate system, but not the origin.</p>
2745      * <p>If this device is the largest or only camera device with a
2746      * given facing, then this position will be <code>(0, 0, 0)</code>; a
2747      * camera device with a lens optical center located 3 cm from
2748      * the main sensor along the +X axis (to the right from the
2749      * user's perspective) will report <code>(0.03, 0, 0)</code>.</p>
     * <p>To transform pixel coordinates between two cameras
2751      * facing the same direction, first the source camera
2752      * {@link CameraCharacteristics#LENS_RADIAL_DISTORTION android.lens.radialDistortion} must be corrected for.  Then
2753      * the source camera {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration} needs
2754      * to be applied, followed by the {@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation}
2755      * of the source camera, the translation of the source camera
2756      * relative to the destination camera, the
2757      * {@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation} of the destination camera, and
2758      * finally the inverse of {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration}
2759      * of the destination camera. This obtains a
2760      * radial-distortion-free coordinate in the destination
2761      * camera pixel coordinates.</p>
2762      * <p>To compare this against a real image from the destination
2763      * camera, the destination camera image then needs to be
2764      * corrected for radial distortion before comparison or
2765      * sampling.</p>
2766      * <p><b>Units</b>: Meters</p>
2767      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
2768      *
2769      * @see CameraCharacteristics#LENS_INTRINSIC_CALIBRATION
2770      * @see CameraCharacteristics#LENS_POSE_ROTATION
2771      * @see CameraCharacteristics#LENS_RADIAL_DISTORTION
2772      */
2773     @PublicKey
2774     public static final Key<float[]> LENS_POSE_TRANSLATION =
2775             new Key<float[]>("android.lens.poseTranslation", float[].class);
2776 
2777     /**
2778      * <p>The parameters for this camera device's intrinsic
2779      * calibration.</p>
2780      * <p>The five calibration parameters that describe the
2781      * transform from camera-centric 3D coordinates to sensor
2782      * pixel coordinates:</p>
2783      * <pre><code>[f_x, f_y, c_x, c_y, s]
2784      * </code></pre>
2785      * <p>Where <code>f_x</code> and <code>f_y</code> are the horizontal and vertical
2786      * focal lengths, <code>[c_x, c_y]</code> is the position of the optical
2787      * axis, and <code>s</code> is a skew parameter for the sensor plane not
2788      * being aligned with the lens plane.</p>
2789      * <p>These are typically used within a transformation matrix K:</p>
2790      * <pre><code>K = [ f_x,   s, c_x,
2791      *        0, f_y, c_y,
     *        0,   0,   1 ]
2793      * </code></pre>
2794      * <p>which can then be combined with the camera pose rotation
2795      * <code>R</code> and translation <code>t</code> ({@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation} and
     * {@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation}, respectively) to calculate the
2797      * complete transform from world coordinates to pixel
2798      * coordinates:</p>
2799      * <pre><code>P = [ K 0   * [ R t
2800      *      0 1 ]     0 1 ]
2801      * </code></pre>
2802      * <p>and with <code>p_w</code> being a point in the world coordinate system
2803      * and <code>p_s</code> being a point in the camera active pixel array
2804      * coordinate system, and with the mapping including the
2805      * homogeneous division by z:</p>
2806      * <pre><code> p_h = (x_h, y_h, z_h) = P p_w
2807      * p_s = p_h / z_h
2808      * </code></pre>
     * <p>so <code>[x_s, y_s]</code> are the pixel coordinates of the world
2810      * point, <code>z_s = 1</code>, and <code>w_s</code> is a measurement of disparity
2811      * (depth) in pixel coordinates.</p>
2812      * <p>Note that the coordinate system for this transform is the
2813      * {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize} system,
2814      * where <code>(0,0)</code> is the top-left of the
2815      * preCorrectionActiveArraySize rectangle. Once the pose and
2816      * intrinsic calibration transforms have been applied to a
2817      * world point, then the {@link CameraCharacteristics#LENS_RADIAL_DISTORTION android.lens.radialDistortion}
2818      * transform needs to be applied, and the result adjusted to
2819      * be in the {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize} coordinate
2820      * system (where <code>(0, 0)</code> is the top-left of the
2821      * activeArraySize rectangle), to determine the final pixel
2822      * coordinate of the world point for processed (non-RAW)
2823      * output buffers.</p>
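     * <p>For reference, a minimal sketch building the matrix <code>K</code> from this key
     * (assuming a non-null {@code float[] c} read via
     * {@code result.get(CaptureResult.LENS_INTRINSIC_CALIBRATION)}):</p>
     * <pre><code>float fx = c[0], fy = c[1], cx = c[2], cy = c[3], s = c[4];
     * float[] k = {
     *     fx,  s, cx,
     *      0, fy, cy,
     *      0,  0,  1
     * }; // row-major 3x3 camera matrix
     * </code></pre>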
2824      * <p><b>Units</b>:
2825      * Pixels in the
2826      * {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize}
2827      * coordinate system.</p>
2828      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
2829      *
2830      * @see CameraCharacteristics#LENS_POSE_ROTATION
2831      * @see CameraCharacteristics#LENS_POSE_TRANSLATION
2832      * @see CameraCharacteristics#LENS_RADIAL_DISTORTION
2833      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
2834      * @see CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
2835      */
2836     @PublicKey
2837     public static final Key<float[]> LENS_INTRINSIC_CALIBRATION =
2838             new Key<float[]>("android.lens.intrinsicCalibration", float[].class);
2839 
2840     /**
2841      * <p>The correction coefficients to correct for this camera device's
2842      * radial and tangential lens distortion.</p>
2843      * <p>Four radial distortion coefficients <code>[kappa_0, kappa_1, kappa_2,
2844      * kappa_3]</code> and two tangential distortion coefficients
2845      * <code>[kappa_4, kappa_5]</code> that can be used to correct the
2846      * lens's geometric distortion with the mapping equations:</p>
2847      * <pre><code> x_c = x_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
2848      *        kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 )
2849      *  y_c = y_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
2850      *        kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 )
2851      * </code></pre>
2852      * <p>Here, <code>[x_c, y_c]</code> are the coordinates to sample in the
2853      * input image that correspond to the pixel values in the
2854      * corrected image at the coordinate <code>[x_i, y_i]</code>:</p>
2855      * <pre><code> correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage)
2856      * </code></pre>
2857      * <p>The pixel coordinates are defined in a normalized
2858      * coordinate system related to the
2859      * {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration} calibration fields.
2860      * Both <code>[x_i, y_i]</code> and <code>[x_c, y_c]</code> have <code>(0,0)</code> at the
2861      * lens optical center <code>[c_x, c_y]</code>. The maximum magnitudes
2862      * of both x and y coordinates are normalized to be 1 at the
2863      * edge further from the optical center, so the range
2864      * for both dimensions is <code>-1 &lt;= x &lt;= 1</code>.</p>
2865      * <p>Finally, <code>r</code> represents the radial distance from the
2866      * optical center, <code>r^2 = x_i^2 + y_i^2</code>, and its magnitude
2867      * is therefore no larger than <code>|r| &lt;= sqrt(2)</code>.</p>
2868      * <p>The distortion model used is the Brown-Conrady model.</p>
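     * <p>A direct transcription of the mapping above (assuming a non-null {@code float[] kappa}
     * read via {@code result.get(CaptureResult.LENS_RADIAL_DISTORTION)} and normalized corrected
     * coordinates {@code xi}, {@code yi}):</p>
     * <pre><code>float r2 = xi * xi + yi * yi;
     * float radial = kappa[0] + kappa[1] * r2 + kappa[2] * r2 * r2 + kappa[3] * r2 * r2 * r2;
     * float xc = xi * radial + kappa[4] * (2 * xi * yi) + kappa[5] * (r2 + 2 * xi * xi);
     * float yc = yi * radial + kappa[5] * (2 * xi * yi) + kappa[4] * (r2 + 2 * yi * yi);
     * // (xc, yc) is where to sample the input image for corrected pixel (xi, yi)
     * </code></pre>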
2869      * <p><b>Units</b>:
2870      * Unitless coefficients.</p>
2871      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
2872      *
2873      * @see CameraCharacteristics#LENS_INTRINSIC_CALIBRATION
2874      */
2875     @PublicKey
2876     public static final Key<float[]> LENS_RADIAL_DISTORTION =
2877             new Key<float[]>("android.lens.radialDistortion", float[].class);
2878 
2879     /**
2880      * <p>Mode of operation for the noise reduction algorithm.</p>
2881      * <p>The noise reduction algorithm attempts to improve image quality by removing
2882      * excessive noise added by the capture process, especially in dark conditions.</p>
2883      * <p>OFF means no noise reduction will be applied by the camera device, for both raw and
2884      * YUV domain.</p>
     * <p>MINIMAL means that only sensor raw domain basic noise reduction is enabled, to remove
     * demosaicing or other processing artifacts. For YUV_REPROCESSING, MINIMAL is the same as OFF.
     * This mode is optional and may not be supported by all devices. The application should check
2888      * {@link CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES android.noiseReduction.availableNoiseReductionModes} before using it.</p>
2889      * <p>FAST/HIGH_QUALITY both mean camera device determined noise filtering
2890      * will be applied. HIGH_QUALITY mode indicates that the camera device
2891      * will use the highest-quality noise filtering algorithms,
2892      * even if it slows down capture rate. FAST means the camera device will not
2893      * slow down capture rate when applying noise filtering. FAST may be the same as MINIMAL if
2894      * MINIMAL is listed, or the same as OFF if any noise filtering will slow down capture rate.
2895      * Every output stream will have a similar amount of enhancement applied.</p>
2896      * <p>ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
2897      * buffer of high-resolution images during preview and reprocess image(s) from that buffer
2898      * into a final capture when triggered by the user. In this mode, the camera device applies
2899      * noise reduction to low-resolution streams (below maximum recording resolution) to maximize
2900      * preview quality, but does not apply noise reduction to high-resolution streams, since
2901      * those will be reprocessed later if necessary.</p>
2902      * <p>For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera device
2903      * will apply FAST/HIGH_QUALITY YUV domain noise reduction, respectively. The camera device
2904      * may adjust the noise reduction parameters for best image quality based on the
2905      * {@link CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR android.reprocess.effectiveExposureFactor} if it is set.</p>
2906      * <p><b>Possible values:</b>
2907      * <ul>
2908      *   <li>{@link #NOISE_REDUCTION_MODE_OFF OFF}</li>
2909      *   <li>{@link #NOISE_REDUCTION_MODE_FAST FAST}</li>
2910      *   <li>{@link #NOISE_REDUCTION_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
2911      *   <li>{@link #NOISE_REDUCTION_MODE_MINIMAL MINIMAL}</li>
2912      *   <li>{@link #NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG ZERO_SHUTTER_LAG}</li>
2913      * </ul></p>
2914      * <p><b>Available values for this device:</b><br>
2915      * {@link CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES android.noiseReduction.availableNoiseReductionModes}</p>
2916      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
2917      * <p><b>Full capability</b> -
2918      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
2919      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
2920      *
2921      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
2922      * @see CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES
2923      * @see CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR
2924      * @see #NOISE_REDUCTION_MODE_OFF
2925      * @see #NOISE_REDUCTION_MODE_FAST
2926      * @see #NOISE_REDUCTION_MODE_HIGH_QUALITY
2927      * @see #NOISE_REDUCTION_MODE_MINIMAL
2928      * @see #NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG
2929      */
2930     @PublicKey
2931     public static final Key<Integer> NOISE_REDUCTION_MODE =
2932             new Key<Integer>("android.noiseReduction.mode", int.class);
2933 
2934     /**
2935      * <p>Whether a result given to the framework is the
2936      * final one for the capture, or only a partial that contains a
2937      * subset of the full set of dynamic metadata
2938      * values.</p>
2939      * <p>The entries in the result metadata buffers for a
2940      * single capture may not overlap, except for this entry. The
2941      * FINAL buffers must retain FIFO ordering relative to the
2942      * requests that generate them, so the FINAL buffer for frame 3 must
2943      * always be sent to the framework after the FINAL buffer for frame 2, and
2944      * before the FINAL buffer for frame 4. PARTIAL buffers may be returned
2945      * in any order relative to other frames, but all PARTIAL buffers for a given
2946      * capture must arrive before the FINAL buffer for that capture. This entry may
2947      * only be used by the camera device if quirks.usePartialResult is set to 1.</p>
2948      * <p><b>Range of valid values:</b><br>
2949      * Optional. Default value is FINAL.</p>
2950      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
2951      * @deprecated
2952      * @hide
2953      */
2954     @Deprecated
2955     public static final Key<Boolean> QUIRKS_PARTIAL_RESULT =
2956             new Key<Boolean>("android.quirks.partialResult", boolean.class);
2957 
2958     /**
2959      * <p>A frame counter set by the framework. This value monotonically
2960      * increases with every new result (that is, each new result has a unique
2961      * frameCount value).</p>
2962      * <p>Reset on release()</p>
2963      * <p><b>Units</b>: count of frames</p>
2964      * <p><b>Range of valid values:</b><br>
2965      * &gt; 0</p>
2966      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
2967      * @deprecated
2968      * @hide
2969      */
2970     @Deprecated
2971     public static final Key<Integer> REQUEST_FRAME_COUNT =
2972             new Key<Integer>("android.request.frameCount", int.class);
2973 
2974     /**
2975      * <p>An application-specified ID for the current
     * request. Must be maintained unchanged in the output
     * frame.</p>
2978      * <p><b>Units</b>: arbitrary integer assigned by application</p>
2979      * <p><b>Range of valid values:</b><br>
2980      * Any int</p>
2981      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
2982      * @hide
2983      */
2984     public static final Key<Integer> REQUEST_ID =
2985             new Key<Integer>("android.request.id", int.class);
2986 
2987     /**
2988      * <p>Specifies the number of pipeline stages the frame went
2989      * through from when it was exposed to when the final completed result
2990      * was available to the framework.</p>
2991      * <p>Depending on what settings are used in the request, and
2992      * what streams are configured, the data may undergo less processing,
     * and some pipeline stages may be skipped.</p>
2994      * <p>See {@link CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH android.request.pipelineMaxDepth} for more details.</p>
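     * <p>A sketch of comparing the per-frame depth against the device maximum (assuming
     * {@code result} and {@code characteristics} variables; both names are illustrative):</p>
     * <pre><code>byte depth = result.get(CaptureResult.REQUEST_PIPELINE_DEPTH);
     * byte maxDepth = characteristics.get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH);
     * // depth is always less than or equal to maxDepth
     * </code></pre>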
2995      * <p><b>Range of valid values:</b><br>
2996      * &lt;= {@link CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH android.request.pipelineMaxDepth}</p>
2997      * <p>This key is available on all devices.</p>
2998      *
2999      * @see CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH
3000      */
3001     @PublicKey
3002     public static final Key<Byte> REQUEST_PIPELINE_DEPTH =
3003             new Key<Byte>("android.request.pipelineDepth", byte.class);
3004 
3005     /**
3006      * <p>The desired region of the sensor to read out for this capture.</p>
3007      * <p>This control can be used to implement digital zoom.</p>
     * <p>The crop region coordinate system is based on
3009      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}, with <code>(0, 0)</code> being the
3010      * top-left corner of the sensor active array.</p>
3011      * <p>Output streams use this rectangle to produce their output,
3012      * cropping to a smaller region if necessary to maintain the
3013      * stream's aspect ratio, then scaling the sensor input to
3014      * match the output's configured resolution.</p>
3015      * <p>The crop region is applied after the RAW to other color
3016      * space (e.g. YUV) conversion. Since raw streams
3017      * (e.g. RAW16) don't have the conversion stage, they are not
3018      * croppable. The crop region will be ignored by raw streams.</p>
3019      * <p>For non-raw streams, any additional per-stream cropping will
3020      * be done to maximize the final pixel area of the stream.</p>
3021      * <p>For example, if the crop region is set to a 4:3 aspect
3022      * ratio, then 4:3 streams will use the exact crop
3023      * region. 16:9 streams will further crop vertically
3024      * (letterbox).</p>
3025      * <p>Conversely, if the crop region is set to a 16:9 aspect ratio, then 4:3
3026      * outputs will crop horizontally (pillarbox), and 16:9
3027      * streams will match exactly. These additional crops will
3028      * be centered within the crop region.</p>
3029      * <p>The width and height of the crop region cannot
3030      * be set to be smaller than
3031      * <code>floor( activeArraySize.width / {@link CameraCharacteristics#SCALER_AVAILABLE_MAX_DIGITAL_ZOOM android.scaler.availableMaxDigitalZoom} )</code> and
3032      * <code>floor( activeArraySize.height / {@link CameraCharacteristics#SCALER_AVAILABLE_MAX_DIGITAL_ZOOM android.scaler.availableMaxDigitalZoom} )</code>, respectively.</p>
3033      * <p>The camera device may adjust the crop region to account
3034      * for rounding and other hardware requirements; the final
3035      * crop region used will be included in the output capture
3036      * result.</p>
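     * <p>As a rough, non-normative sketch, assuming a {@code CameraCharacteristics} instance
     * named {@code characteristics}, a {@code CaptureRequest.Builder} named {@code builder},
     * and a completed {@code result}, a centered crop for a hypothetical 2x digital zoom could
     * be computed as:</p>
     * <pre><code>
     * Rect active = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
     * float zoom = 2.0f; // hypothetical zoom factor
     * int cropW = Math.round(active.width() / zoom);
     * int cropH = Math.round(active.height() / zoom);
     * int left = (active.width() - cropW) / 2;
     * int top = (active.height() - cropH) / 2;
     * builder.set(CaptureRequest.SCALER_CROP_REGION,
     *         new Rect(left, top, left + cropW, top + cropH));
     * // The result reports the crop region the device actually used:
     * Rect used = result.get(CaptureResult.SCALER_CROP_REGION);
     * </code></pre>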
3037      * <p><b>Units</b>: Pixel coordinates relative to
3038      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
3039      * <p>This key is available on all devices.</p>
3040      *
3041      * @see CameraCharacteristics#SCALER_AVAILABLE_MAX_DIGITAL_ZOOM
3042      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
3043      */
3044     @PublicKey
3045     public static final Key<android.graphics.Rect> SCALER_CROP_REGION =
3046             new Key<android.graphics.Rect>("android.scaler.cropRegion", android.graphics.Rect.class);
3047 
3048     /**
3049      * <p>Duration each pixel is exposed to
3050      * light.</p>
3051      * <p>If the sensor can't expose this exact duration, it will shorten the
3052      * duration exposed to the nearest possible value (rather than expose longer).
3053      * The final exposure time used will be available in the output capture result.</p>
3054      * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to
3055      * OFF; otherwise the auto-exposure algorithm will override this value.</p>
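     * <p>A minimal illustrative sketch (assuming a {@code characteristics} object, a request
     * {@code builder}, and a completed {@code result}; the 8 ms target is arbitrary):</p>
     * <pre><code>
     * Range&lt;Long&gt; range =
     *         characteristics.get(CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE);
     * long exposureNs = range.clamp(8_000_000L); // clamp the target to the supported range
     * builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
     * builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, exposureNs);
     * // After the capture completes, the exposure time actually used is reported here:
     * Long actualNs = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
     * </code></pre>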
3056      * <p><b>Units</b>: Nanoseconds</p>
3057      * <p><b>Range of valid values:</b><br>
3058      * {@link CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE android.sensor.info.exposureTimeRange}</p>
3059      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3060      * <p><b>Full capability</b> -
3061      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
3062      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
3063      *
3064      * @see CaptureRequest#CONTROL_AE_MODE
3065      * @see CaptureRequest#CONTROL_MODE
3066      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
3067      * @see CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE
3068      */
3069     @PublicKey
3070     public static final Key<Long> SENSOR_EXPOSURE_TIME =
3071             new Key<Long>("android.sensor.exposureTime", long.class);
3072 
3073     /**
3074      * <p>Duration from start of frame exposure to
3075      * start of next frame exposure.</p>
3076      * <p>The maximum frame rate that can be supported by a camera subsystem is
3077      * a function of many factors:</p>
3078      * <ul>
3079      * <li>Requested resolutions of output image streams</li>
3080      * <li>Availability of binning / skipping modes on the imager</li>
3081      * <li>The bandwidth of the imager interface</li>
3082      * <li>The bandwidth of the various ISP processing blocks</li>
3083      * </ul>
3084      * <p>Since these factors can vary greatly between different ISPs and
3085      * sensors, the camera abstraction tries to represent the bandwidth
3086      * restrictions with as simple a model as possible.</p>
3087      * <p>The model presented has the following characteristics:</p>
3088      * <ul>
3089      * <li>The image sensor is always configured to output the smallest
3090      * resolution possible given the application's requested output stream
3091      * sizes.  The smallest resolution is defined as being at least as large
3092      * as the largest requested output stream size; the camera pipeline must
3093      * never digitally upsample sensor data when the crop region covers the
3094      * whole sensor. In general, this means that if only small output stream
3095      * resolutions are configured, the sensor can provide a higher frame
3096      * rate.</li>
3097      * <li>Since any request may use any or all the currently configured
3098      * output streams, the sensor and ISP must be configured to support
3099      * scaling a single capture to all the streams at the same time.  This
3100      * means the camera pipeline must be ready to produce the largest
3101      * requested output size without any delay.  Therefore, the overall
3102      * frame rate of a given configured stream set is governed only by the
3103      * largest requested stream resolution.</li>
3104      * <li>Using more than one output stream in a request does not affect the
3105      * frame duration.</li>
3106      * <li>Certain format-streams may need to do additional background processing
3107      * before data is consumed/produced by that stream. These processors
3108      * can run concurrently with the rest of the camera pipeline, but
3109      * cannot process more than 1 capture at a time.</li>
3110      * </ul>
3111      * <p>The necessary information for the application, given the model above,
3112      * is provided via the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} field using
3113      * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }.
3114      * These are used to determine the maximum frame rate / minimum frame
3115      * duration that is possible for a given stream configuration.</p>
3116      * <p>Specifically, the application can use the following rules to
3117      * determine the minimum frame duration it can request from the camera
3118      * device:</p>
3119      * <ol>
3120      * <li>Let the set of currently configured input/output streams
3121      * be called <code>S</code>.</li>
3122      * <li>Find the minimum frame durations for each stream in <code>S</code>, by looking
3123      * it up in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} using {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }
3124      * (with its respective size/format). Let this set of frame durations be
3125      * called <code>F</code>.</li>
3126      * <li>For any given request <code>R</code>, the minimum frame duration allowed
3127      * for <code>R</code> is the maximum out of all values in <code>F</code>. Let the streams
3128      * used in <code>R</code> be called <code>S_r</code>.</li>
3129      * </ol>
3130      * <p>If none of the streams in <code>S_r</code> have a stall time (listed in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }
3131      * using its respective size/format), then the frame duration in <code>F</code>
3132      * determines the steady state frame rate that the application will get
3133      * if it uses <code>R</code> as a repeating request. Let this special kind of
3134      * request be called <code>Rsimple</code>.</p>
3135      * <p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved
3136      * by a single capture of a new request <code>Rstall</code> (which has at least
3137      * one in-use stream with a non-0 stall time) and if <code>Rstall</code> has the
3138      * same minimum frame duration this will not cause a frame rate loss
3139      * if all buffers from the previous <code>Rstall</code> have already been
3140      * delivered.</p>
3141      * <p>For more details about stalling, see
3142      * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }.</p>
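     * <p>As a non-normative sketch of the rules above, for a hypothetical stream set
     * <code>S</code> made of a 1920x1080 YUV stream and a 4032x3024 JPEG stream (the sizes are
     * assumptions for illustration; use sizes the device actually reports as supported):</p>
     * <pre><code>
     * StreamConfigurationMap map =
     *         characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
     * long yuvMin  = map.getOutputMinFrameDuration(ImageFormat.YUV_420_888, new Size(1920, 1080));
     * long jpegMin = map.getOutputMinFrameDuration(ImageFormat.JPEG, new Size(4032, 3024));
     * // The minimum legal frame duration for a request using both streams is the maximum of F:
     * long minFrameDuration = Math.max(yuvMin, jpegMin);
     * </code></pre>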
3143      * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to
3144      * OFF; otherwise the auto-exposure algorithm will override this value.</p>
3145      * <p><b>Units</b>: Nanoseconds</p>
3146      * <p><b>Range of valid values:</b><br>
3147      * See {@link CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION android.sensor.info.maxFrameDuration},
3148      * {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}. The duration
3149      * is capped to <code>max(duration, exposureTime + overhead)</code>.</p>
3150      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3151      * <p><b>Full capability</b> -
3152      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
3153      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
3154      *
3155      * @see CaptureRequest#CONTROL_AE_MODE
3156      * @see CaptureRequest#CONTROL_MODE
3157      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
3158      * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
3159      * @see CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION
3160      */
3161     @PublicKey
3162     public static final Key<Long> SENSOR_FRAME_DURATION =
3163             new Key<Long>("android.sensor.frameDuration", long.class);
3164 
3165     /**
3166      * <p>The amount of gain applied to sensor data
3167      * before processing.</p>
3168      * <p>The sensitivity is the standard ISO sensitivity value,
3169      * as defined in ISO 12232:2006.</p>
3170      * <p>The sensitivity must be within {@link CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE android.sensor.info.sensitivityRange}, and
3171      * if it is less than {@link CameraCharacteristics#SENSOR_MAX_ANALOG_SENSITIVITY android.sensor.maxAnalogSensitivity}, the camera device
3172      * is guaranteed to use only analog amplification for applying the gain.</p>
3173      * <p>If the camera device cannot apply the exact sensitivity
3174      * requested, it will reduce the gain to the nearest supported
3175      * value. The final sensitivity used will be available in the
3176      * output capture result.</p>
3177      * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to
3178      * OFF; otherwise the auto-exposure algorithm will override this value.</p>
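     * <p>An illustrative sketch (the ISO 400 target is an arbitrary assumption):</p>
     * <pre><code>
     * Range&lt;Integer&gt; range =
     *         characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE);
     * Integer maxAnalog =
     *         characteristics.get(CameraCharacteristics.SENSOR_MAX_ANALOG_SENSITIVITY);
     * int iso = range.clamp(400); // targets at or below maxAnalog use only analog gain
     * builder.set(CaptureRequest.SENSOR_SENSITIVITY, iso);
     * </code></pre>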
3179      * <p><b>Units</b>: ISO arithmetic units</p>
3180      * <p><b>Range of valid values:</b><br>
3181      * {@link CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE android.sensor.info.sensitivityRange}</p>
3182      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3183      * <p><b>Full capability</b> -
3184      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
3185      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
3186      *
3187      * @see CaptureRequest#CONTROL_AE_MODE
3188      * @see CaptureRequest#CONTROL_MODE
3189      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
3190      * @see CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE
3191      * @see CameraCharacteristics#SENSOR_MAX_ANALOG_SENSITIVITY
3192      */
3193     @PublicKey
3194     public static final Key<Integer> SENSOR_SENSITIVITY =
3195             new Key<Integer>("android.sensor.sensitivity", int.class);
3196 
3197     /**
3198      * <p>Time at start of exposure of first
3199      * row of the image sensor active array, in nanoseconds.</p>
3200      * <p>The timestamps are also included in all image
3201      * buffers produced for the same capture, and will be identical
3202      * on all the outputs.</p>
3203      * <p>When {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE android.sensor.info.timestampSource} <code>==</code> UNKNOWN,
3204      * the timestamps measure time since an unspecified starting point,
3205      * and are monotonically increasing. They can be compared with the
3206      * timestamps for other captures from the same camera device, but are
3207      * not guaranteed to be comparable to any other time source.</p>
3208      * <p>When {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE android.sensor.info.timestampSource} <code>==</code> REALTIME, the
3209      * timestamps measure time in the same timebase as {@link android.os.SystemClock#elapsedRealtimeNanos }, and they can
3210      * be compared to other timestamps from other subsystems that
3211      * are using that base.</p>
3212      * <p>For reprocessing, the timestamp will match the start of exposure of
3213      * the input image, i.e. {@link CaptureResult#SENSOR_TIMESTAMP the
3214      * timestamp} in the TotalCaptureResult that was used to create the
3215      * reprocess capture request.</p>
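     * <p>For example (a non-normative sketch), when the timestamp source is REALTIME the
     * capture-to-now latency can be estimated against the same clock:</p>
     * <pre><code>
     * Integer source = characteristics.get(CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
     * if (Integer.valueOf(CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME).equals(source)) {
     *     long latencyNs = SystemClock.elapsedRealtimeNanos()
     *             - result.get(CaptureResult.SENSOR_TIMESTAMP); // same timebase, so comparable
     * }
     * </code></pre>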
3216      * <p><b>Units</b>: Nanoseconds</p>
3217      * <p><b>Range of valid values:</b><br>
3218      * &gt; 0</p>
3219      * <p>This key is available on all devices.</p>
3220      *
3221      * @see CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE
3222      */
3223     @PublicKey
3224     public static final Key<Long> SENSOR_TIMESTAMP =
3225             new Key<Long>("android.sensor.timestamp", long.class);
3226 
3227     /**
3228      * <p>The estimated camera neutral color in the native sensor colorspace at
3229      * the time of capture.</p>
3230      * <p>This value gives the neutral color point encoded as an RGB value in the
3231      * native sensor color space.  The neutral color point indicates the
3232      * currently estimated white point of the scene illumination.  It can be
3233      * used to interpolate between the provided color transforms when
3234      * processing raw sensor data.</p>
3235      * <p>The order of the values is R, G, B; where R is in the lowest index.</p>
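     * <p>A minimal reading sketch:</p>
     * <pre><code>
     * Rational[] neutral = result.get(CaptureResult.SENSOR_NEUTRAL_COLOR_POINT);
     * if (neutral != null) {
     *     double r = neutral[0].doubleValue();
     *     double g = neutral[1].doubleValue();
     *     double b = neutral[2].doubleValue();
     *     // r/g and b/g roughly characterize the estimated white point in sensor space
     * }
     * </code></pre>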
3236      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3237      */
3238     @PublicKey
3239     public static final Key<Rational[]> SENSOR_NEUTRAL_COLOR_POINT =
3240             new Key<Rational[]>("android.sensor.neutralColorPoint", Rational[].class);
3241 
3242     /**
3243      * <p>Noise model coefficients for each CFA mosaic channel.</p>
3244      * <p>This key contains two noise model coefficients for each CFA channel
3245      * corresponding to the sensor amplification (S) and sensor readout
3246      * noise (O).  These are given as pairs of coefficients for each channel
3247      * in the same order as channels listed for the CFA layout key
3248      * (see {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT android.sensor.info.colorFilterArrangement}).  This is
3249      * represented as an array of Pair&lt;Double, Double&gt;, where
3250      * the first member of the Pair at index n is the S coefficient and the
3251      * second member is the O coefficient for the nth color channel in the CFA.</p>
3252      * <p>These coefficients are used in a two parameter noise model to describe
3253      * the amount of noise present in the image for each CFA channel.  The
3254      * noise model used here is:</p>
3255      * <p>N(x) = sqrt(Sx + O)</p>
3256      * <p>Where x represents the recorded signal of a CFA channel normalized to
3257      * the range [0, 1], and S and O are the noise model coefficients for
3258      * that channel.</p>
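     * <p>A minimal evaluation sketch of this model (the 0.5 signal level is an arbitrary
     * example):</p>
     * <pre><code>
     * Pair&lt;Double, Double&gt;[] profile = result.get(CaptureResult.SENSOR_NOISE_PROFILE);
     * if (profile != null) {
     *     double x = 0.5; // normalized signal level in [0, 1]
     *     for (Pair&lt;Double, Double&gt; channel : profile) {
     *         double noise = Math.sqrt(channel.first * x + channel.second); // N(x) = sqrt(S*x + O)
     *     }
     * }
     * </code></pre>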
3259      * <p>A more detailed description of the noise model can be found in the
3260      * Adobe DNG specification for the NoiseProfile tag.</p>
3261      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3262      *
3263      * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
3264      */
3265     @PublicKey
3266     public static final Key<android.util.Pair<Double,Double>[]> SENSOR_NOISE_PROFILE =
3267             new Key<android.util.Pair<Double,Double>[]>("android.sensor.noiseProfile", new TypeReference<android.util.Pair<Double,Double>[]>() {{ }});
3268 
3269     /**
3270      * <p>The worst-case divergence between Bayer green channels.</p>
3271      * <p>This value is an estimate of the worst case split between the
3272      * Bayer green channels in the red and blue rows in the sensor color
3273      * filter array.</p>
3274      * <p>The green split is calculated as follows:</p>
3275      * <ol>
3276      * <li>A 5x5 pixel (or larger) window W within the active sensor array is
3277      * chosen. The term 'pixel' here is taken to mean a group of 4 Bayer
3278      * mosaic channels (R, Gr, Gb, B).  The location and size of the window
3279      * chosen is implementation defined, and should be chosen to provide a
3280      * green split estimate that is both representative of the entire image
3281      * for this camera sensor, and can be calculated quickly.</li>
3282      * <li>The arithmetic mean of the green channels from the red
3283      * rows (mean_Gr) within W is computed.</li>
3284      * <li>The arithmetic mean of the green channels from the blue
3285      * rows (mean_Gb) within W is computed.</li>
3286      * <li>The maximum ratio R of the two means is computed as follows:
3287      * <code>R = max((mean_Gr + 1)/(mean_Gb + 1), (mean_Gb + 1)/(mean_Gr + 1))</code></li>
3288      * </ol>
3289      * <p>The ratio R is the green split divergence reported for this property,
3290      * which represents how much the green channels differ in the mosaic
3291      * pattern.  This value is typically used to determine the treatment of
3292      * the green mosaic channels when demosaicing.</p>
3293      * <p>The green split value can be roughly interpreted as follows:</p>
3294      * <ul>
3295      * <li>R &lt; 1.03 is a negligible split (&lt;3% divergence).</li>
3296      * <li>1.03 &lt;= R &lt;= 1.20 will require some software
3297      * correction to avoid demosaic errors (3-20% divergence).</li>
3298      * <li>R &gt; 1.20 will require strong software correction to produce
3299      * a usable image (&gt;20% divergence).</li>
3300      * </ul>
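     * <p>A rough classification based on the thresholds above might look like:</p>
     * <pre><code>
     * Float split = result.get(CaptureResult.SENSOR_GREEN_SPLIT);
     * if (split != null) {
     *     if (split &lt; 1.03f) {
     *         // negligible divergence; no special demosaic handling needed
     *     } else if (split &lt;= 1.20f) {
     *         // moderate divergence; some software correction recommended
     *     } else {
     *         // large divergence; strong correction required
     *     }
     * }
     * </code></pre>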
3301      * <p><b>Range of valid values:</b><br></p>
3302      * <p>&gt;= 0</p>
3303      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3304      */
3305     @PublicKey
3306     public static final Key<Float> SENSOR_GREEN_SPLIT =
3307             new Key<Float>("android.sensor.greenSplit", float.class);
3308 
3309     /**
3310      * <p>A pixel <code>[R, G_even, G_odd, B]</code> that supplies the test pattern
3311      * when {@link CaptureRequest#SENSOR_TEST_PATTERN_MODE android.sensor.testPatternMode} is SOLID_COLOR.</p>
3312      * <p>Each color channel is treated as an unsigned 32-bit integer.
3313      * The camera device then uses the most significant X bits
3314      * that correspond to how many bits are in its Bayer raw sensor
3315      * output.</p>
3316      * <p>For example, a sensor with RAW10 Bayer output would use the
3317      * 10 most significant bits from each color channel.</p>
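     * <p>An illustrative sketch requesting a solid red test frame (the channel values are
     * arbitrary examples):</p>
     * <pre><code>
     * builder.set(CaptureRequest.SENSOR_TEST_PATTERN_MODE,
     *         CameraMetadata.SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);
     * // [R, G_even, G_odd, B]: full-scale red, all other channels zero
     * builder.set(CaptureRequest.SENSOR_TEST_PATTERN_DATA,
     *         new int[] { 0xFFFFFFFF, 0, 0, 0 });
     * </code></pre>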
3318      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3319      *
3320      * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
3321      */
3322     @PublicKey
3323     public static final Key<int[]> SENSOR_TEST_PATTERN_DATA =
3324             new Key<int[]>("android.sensor.testPatternData", int[].class);
3325 
3326     /**
3327      * <p>When enabled, the sensor sends a test pattern instead of
3328      * doing a real exposure from the camera.</p>
3329      * <p>When a test pattern is enabled, all manual sensor controls specified
3330      * by android.sensor.* will be ignored. All other controls should
3331      * work as normal.</p>
3332      * <p>For example, if manual flash is enabled, flash firing should still
3333      * occur, and the test pattern should remain unmodified, since the flash
3334      * would not actually affect it.</p>
3335      * <p>Defaults to OFF.</p>
3336      * <p><b>Possible values:</b>
3337      * <ul>
3338      *   <li>{@link #SENSOR_TEST_PATTERN_MODE_OFF OFF}</li>
3339      *   <li>{@link #SENSOR_TEST_PATTERN_MODE_SOLID_COLOR SOLID_COLOR}</li>
3340      *   <li>{@link #SENSOR_TEST_PATTERN_MODE_COLOR_BARS COLOR_BARS}</li>
3341      *   <li>{@link #SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY COLOR_BARS_FADE_TO_GRAY}</li>
3342      *   <li>{@link #SENSOR_TEST_PATTERN_MODE_PN9 PN9}</li>
3343      *   <li>{@link #SENSOR_TEST_PATTERN_MODE_CUSTOM1 CUSTOM1}</li>
3344      * </ul></p>
3345      * <p><b>Available values for this device:</b><br>
3346      * {@link CameraCharacteristics#SENSOR_AVAILABLE_TEST_PATTERN_MODES android.sensor.availableTestPatternModes}</p>
3347      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3348      *
3349      * @see CameraCharacteristics#SENSOR_AVAILABLE_TEST_PATTERN_MODES
3350      * @see #SENSOR_TEST_PATTERN_MODE_OFF
3351      * @see #SENSOR_TEST_PATTERN_MODE_SOLID_COLOR
3352      * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS
3353      * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY
3354      * @see #SENSOR_TEST_PATTERN_MODE_PN9
3355      * @see #SENSOR_TEST_PATTERN_MODE_CUSTOM1
3356      */
3357     @PublicKey
3358     public static final Key<Integer> SENSOR_TEST_PATTERN_MODE =
3359             new Key<Integer>("android.sensor.testPatternMode", int.class);
3360 
3361     /**
3362      * <p>Duration between the start of first row exposure
3363      * and the start of last row exposure.</p>
3364      * <p>This is the exposure time skew between the first and last
3365      * row exposure start times. The first row and the last row are
3366      * the first and last rows inside of the
3367      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.</p>
3368      * <p>For typical camera sensors that use rolling shutters, this is also equivalent
3369      * to the frame readout time.</p>
3370      * <p><b>Units</b>: Nanoseconds</p>
3371      * <p><b>Range of valid values:</b><br>
3372      * &gt;= 0 and &lt;
3373      * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }.</p>
3374      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3375      * <p><b>Limited capability</b> -
3376      * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
3377      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
3378      *
3379      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
3380      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
3381      */
3382     @PublicKey
3383     public static final Key<Long> SENSOR_ROLLING_SHUTTER_SKEW =
3384             new Key<Long>("android.sensor.rollingShutterSkew", long.class);
3385 
3386     /**
3387      * <p>A per-frame dynamic black level offset for each of the color filter
3388      * arrangement (CFA) mosaic channels.</p>
3389      * <p>Camera sensor black levels may vary dramatically for different
3390      * capture settings (e.g. {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}). The fixed black
3391      * level reported by {@link CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN android.sensor.blackLevelPattern} may be too
3392      * inaccurate to represent the actual value on a per-frame basis. The
3393      * camera device internal pipeline relies on reliable black level values
3394      * to process the raw images appropriately. To get the best image
3395      * quality, the camera device may choose to estimate the per frame black
3396      * level values either based on optically shielded black regions
3397      * ({@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions}) or its internal model.</p>
3398      * <p>This key reports the camera device estimated per-frame zero light
3399      * value for each of the CFA mosaic channels in the camera sensor. The
3400      * {@link CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN android.sensor.blackLevelPattern} may only represent a coarse
3401      * approximation of the actual black level values. This value is the
3402      * black level used in the camera device's internal image processing pipeline
3403      * and is generally more accurate than the fixed black level values.
3404      * However, since they are estimated values by the camera device, they
3405      * may not be as accurate as the black level values calculated from the
3406      * optical black pixels reported by {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions}.</p>
3407      * <p>The values are given in the same order as channels listed for the CFA
3408      * layout key (see {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT android.sensor.info.colorFilterArrangement}), i.e. the
3409      * nth value given corresponds to the black level offset for the nth
3410      * color channel listed in the CFA.</p>
3411      * <p>This key will be available if {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions} is
3412      * available or the camera device advertises this key via
3413      * {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys }.</p>
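     * <p>A non-normative sketch that prefers the per-frame estimate and falls back to the
     * static pattern (mapping channel index 0 to the top-left 2x2 position is an assumption
     * for illustration):</p>
     * <pre><code>
     * float[] dynamicBlack = result.get(CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL);
     * BlackLevelPattern staticBlack =
     *         characteristics.get(CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN);
     * float blackChannel0 = (dynamicBlack != null)
     *         ? dynamicBlack[0] : staticBlack.getOffsetForIndex(0, 0);
     * </code></pre>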
3414      * <p><b>Range of valid values:</b><br>
3415      * &gt;= 0 for each.</p>
3416      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3417      *
3418      * @see CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN
3419      * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
3420      * @see CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS
3421      * @see CaptureRequest#SENSOR_SENSITIVITY
3422      */
3423     @PublicKey
3424     public static final Key<float[]> SENSOR_DYNAMIC_BLACK_LEVEL =
3425             new Key<float[]>("android.sensor.dynamicBlackLevel", float[].class);
3426 
3427     /**
3428      * <p>Maximum raw value output by sensor for this frame.</p>
3429      * <p>Since the {@link CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN android.sensor.blackLevelPattern} may change for different
3430      * capture settings (e.g., {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}), the white
3431      * level will change accordingly. This key is similar to
3432      * {@link CameraCharacteristics#SENSOR_INFO_WHITE_LEVEL android.sensor.info.whiteLevel}, but specifies the camera device
3433      * estimated white level for each frame.</p>
3434      * <p>This key will be available if {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions} is
3435      * available or the camera device advertises this key via
3436      * {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys }.</p>
3437      * <p><b>Range of valid values:</b><br>
3438      * &gt;= 0</p>
3439      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3440      *
3441      * @see CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN
3442      * @see CameraCharacteristics#SENSOR_INFO_WHITE_LEVEL
3443      * @see CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS
3444      * @see CaptureRequest#SENSOR_SENSITIVITY
3445      */
3446     @PublicKey
3447     public static final Key<Integer> SENSOR_DYNAMIC_WHITE_LEVEL =
3448             new Key<Integer>("android.sensor.dynamicWhiteLevel", int.class);
3449 
3450     /**
3451      * <p>Quality of lens shading correction applied
3452      * to the image data.</p>
3453      * <p>When set to OFF mode, no lens shading correction will be applied by the
3454      * camera device, and an identity lens shading map data will be provided
3455      * if <code>{@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} == ON</code>. For example, for a lens
3456      * shading map of size <code>[ 4, 3 ]</code>,
3457      * the output {@link CaptureResult#STATISTICS_LENS_SHADING_CORRECTION_MAP android.statistics.lensShadingCorrectionMap} for this case will be an identity
3458      * map shown below:</p>
3459      * <pre><code>[ 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
3460      *  1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
3461      *  1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
3462      *  1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
3463      *  1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
3464      *  1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0 ]
3465      * </code></pre>
3466      * <p>When set to other modes, lens shading correction will be applied by the camera
3467      * device. Applications can request lens shading map data by setting
3468      * {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} to ON, and then the camera device will provide lens
3469      * shading map data in {@link CaptureResult#STATISTICS_LENS_SHADING_CORRECTION_MAP android.statistics.lensShadingCorrectionMap}; the returned shading map
3470      * data will be the one applied by the camera device for this capture request.</p>
3471      * <p>The shading map data may depend on the auto-exposure (AE) and AWB statistics, therefore
3472      * the reliability of the map data may be affected by the AE and AWB algorithms. When AE and
3473      * AWB are in AUTO modes ({@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} <code>!=</code> OFF and {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} <code>!=</code>
3474      * OFF), to get the best results, it is recommended that applications wait for the AE and AWB
3475      * to be converged before using the returned shading map data.</p>
3476      * <p><b>Possible values:</b>
3477      * <ul>
3478      *   <li>{@link #SHADING_MODE_OFF OFF}</li>
3479      *   <li>{@link #SHADING_MODE_FAST FAST}</li>
3480      *   <li>{@link #SHADING_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
3481      * </ul></p>
3482      * <p><b>Available values for this device:</b><br>
3483      * {@link CameraCharacteristics#SHADING_AVAILABLE_MODES android.shading.availableModes}</p>
3484      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3485      * <p><b>Full capability</b> -
3486      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
3487      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
3488      *
3489      * @see CaptureRequest#CONTROL_AE_MODE
3490      * @see CaptureRequest#CONTROL_AWB_MODE
3491      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
3492      * @see CameraCharacteristics#SHADING_AVAILABLE_MODES
3493      * @see CaptureResult#STATISTICS_LENS_SHADING_CORRECTION_MAP
3494      * @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
3495      * @see #SHADING_MODE_OFF
3496      * @see #SHADING_MODE_FAST
3497      * @see #SHADING_MODE_HIGH_QUALITY
3498      */
3499     @PublicKey
3500     public static final Key<Integer> SHADING_MODE =
3501             new Key<Integer>("android.shading.mode", int.class);
3502 
3503     /**
3504      * <p>Operating mode for the face detector
3505      * unit.</p>
3506      * <p>Whether face detection is enabled, and whether it
3507      * should output just the basic fields or the full set of
3508      * fields.</p>
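     * <p>A minimal sketch that picks the most detailed supported mode (relies on the defined
     * constant ordering OFF &lt; SIMPLE &lt; FULL):</p>
     * <pre><code>
     * int[] modes = characteristics.get(
     *         CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES);
     * int best = CameraMetadata.STATISTICS_FACE_DETECT_MODE_OFF;
     * for (int mode : modes) {
     *     best = Math.max(best, mode);
     * }
     * builder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, best);
     * </code></pre>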
3509      * <p><b>Possible values:</b>
3510      * <ul>
3511      *   <li>{@link #STATISTICS_FACE_DETECT_MODE_OFF OFF}</li>
3512      *   <li>{@link #STATISTICS_FACE_DETECT_MODE_SIMPLE SIMPLE}</li>
3513      *   <li>{@link #STATISTICS_FACE_DETECT_MODE_FULL FULL}</li>
3514      * </ul></p>
3515      * <p><b>Available values for this device:</b><br>
3516      * {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES android.statistics.info.availableFaceDetectModes}</p>
3517      * <p>This key is available on all devices.</p>
3518      *
3519      * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES
3520      * @see #STATISTICS_FACE_DETECT_MODE_OFF
3521      * @see #STATISTICS_FACE_DETECT_MODE_SIMPLE
3522      * @see #STATISTICS_FACE_DETECT_MODE_FULL
3523      */
3524     @PublicKey
3525     public static final Key<Integer> STATISTICS_FACE_DETECT_MODE =
3526             new Key<Integer>("android.statistics.faceDetectMode", int.class);
3527 
3528     /**
3529      * <p>List of unique IDs for detected faces.</p>
3530      * <p>Each detected face is given a unique ID that is valid for as long as the face is visible
3531      * to the camera device.  A face that leaves the field of view and later returns may be
3532      * assigned a new ID.</p>
3533      * <p>Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} == FULL.</p>
3534      * <p>This key is available on all devices.</p>
3535      *
3536      * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
3537      * @hide
3538      */
3539     public static final Key<int[]> STATISTICS_FACE_IDS =
3540             new Key<int[]>("android.statistics.faceIds", int[].class);
3541 
3542     /**
3543      * <p>List of landmarks for detected
3544      * faces.</p>
3545      * <p>The coordinate system is that of {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}, with
3546      * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
3547      * <p>Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} == FULL.</p>
3548      * <p>This key is available on all devices.</p>
3549      *
3550      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
3551      * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
3552      * @hide
3553      */
3554     public static final Key<int[]> STATISTICS_FACE_LANDMARKS =
3555             new Key<int[]>("android.statistics.faceLandmarks", int[].class);
3556 
3557     /**
3558      * <p>List of the bounding rectangles for detected
3559      * faces.</p>
3560      * <p>The coordinate system is that of {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}, with
3561      * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
3562      * <p>Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} != OFF.</p>
3563      * <p>This key is available on all devices.</p>
3564      *
3565      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
3566      * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
3567      * @hide
3568      */
3569     public static final Key<android.graphics.Rect[]> STATISTICS_FACE_RECTANGLES =
3570             new Key<android.graphics.Rect[]>("android.statistics.faceRectangles", android.graphics.Rect[].class);
3571 
3572     /**
3573      * <p>List of the face confidence scores for
3574      * detected faces</p>
3575      * <p>Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} != OFF.</p>
3576      * <p><b>Range of valid values:</b><br>
3577      * 1-100</p>
3578      * <p>This key is available on all devices.</p>
3579      *
3580      * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
3581      * @hide
3582      */
3583     public static final Key<byte[]> STATISTICS_FACE_SCORES =
3584             new Key<byte[]>("android.statistics.faceScores", byte[].class);
3585 
3586     /**
3587      * <p>List of the faces detected through camera face detection
3588      * in this capture.</p>
3589      * <p>Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} <code>!=</code> OFF.</p>
3590      * <p>This key is available on all devices.</p>
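     * <p>A minimal reading sketch:</p>
     * <pre><code>
     * Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
     * if (faces != null) {
     *     for (Face face : faces) {
     *         Rect bounds = face.getBounds(); // active-array coordinates
     *         int score = face.getScore();    // between Face.SCORE_MIN and Face.SCORE_MAX
     *     }
     * }
     * </code></pre>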
3591      *
3592      * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
3593      */
3594     @PublicKey
3595     @SyntheticKey
3596     public static final Key<android.hardware.camera2.params.Face[]> STATISTICS_FACES =
3597             new Key<android.hardware.camera2.params.Face[]>("android.statistics.faces", android.hardware.camera2.params.Face[].class);
3598 
3599     /**
3600      * <p>The shading map is a low-resolution floating-point map
3601      * that lists the coefficients used to correct for vignetting, for each
3602      * Bayer color channel.</p>
3603      * <p>The map provided here is the same map that is used by the camera device to
3604      * correct both color shading and vignetting for output non-RAW images.</p>
3605      * <p>When there is no lens shading correction applied to RAW
3606      * output images ({@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} <code>==</code>
3607      * false), this map is the complete lens shading correction
3608      * map; when there is some lens shading correction applied to
3609      * the RAW output image ({@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} <code>==</code> true), this map reports the remaining lens shading
3610      * correction map that needs to be applied to get shading
3611      * corrected images that match the camera device's output for
3612      * non-RAW formats.</p>
3613      * <p>For a complete shading correction map, the least shaded
3614      * section of the image will have a gain factor of 1; all
3615      * other sections will have gains above 1.</p>
3616      * <p>When {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} = TRANSFORM_MATRIX, the map
3617      * will take into account the colorCorrection settings.</p>
3618      * <p>The shading map is for the entire active pixel array, and is not
3619      * affected by the crop region specified in the request. Each shading map
3620      * entry is the value of the shading compensation map over a specific
3621      * pixel on the sensor.  Specifically, with a (N x M) resolution shading
3622      * map, and an active pixel array size (W x H), shading map entry
3623      * (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
3624      * pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
3625      * The map is assumed to be bilinearly interpolated between the sample points.</p>
3626      * <p>The channel order is [R, Geven, Godd, B], where Geven is the green
3627      * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
3628      * The shading map is stored in a fully interleaved format.</p>
3629      * <p>The shading map will generally have on the order of 30-40 rows and columns,
3630      * and will be smaller than 64x64.</p>
3631      * <p>As an example, given a very small map defined as:</p>
3632      * <pre><code>width,height = [ 4, 3 ]
3633      * values =
3634      * [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
3635      *     1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
3636      *   1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
3637      *     1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
3638      *   1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
3639      *     1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
3640      * </code></pre>
3641      * <p>The low-resolution scaling map images for each channel are
3642      * (displayed using nearest-neighbor interpolation):</p>
3643      * <p><img alt="Red lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
3644      * <img alt="Green (even rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
3645      * <img alt="Green (odd rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
3646      * <img alt="Blue lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
3647      * <p>As a visualization only, inverting the full-color map to recover an
3648      * image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:</p>
3649      * <p><img alt="Image of a uniform white wall (inverse shading map)" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
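     * <p>A minimal sketch of reading individual gains from the returned
     * {@link android.hardware.camera2.params.LensShadingMap LensShadingMap}:</p>
     * <pre><code>
     * LensShadingMap map = result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
     * if (map != null) {
     *     int columns = map.getColumnCount();
     *     int rows = map.getRowCount();
     *     // Gain for the red channel at the top-left sample point of the map
     *     float redGain = map.getGainFactor(RggbChannelVector.RED, 0, 0);
     * }
     * </code></pre>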
3650      * <p><b>Range of valid values:</b><br>
3651      * Each gain factor is &gt;= 1</p>
3652      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3653      * <p><b>Full capability</b> -
3654      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
3655      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
3656      *
3657      * @see CaptureRequest#COLOR_CORRECTION_MODE
3658      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
3659      * @see CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED
3660      */
3661     @PublicKey
3662     public static final Key<android.hardware.camera2.params.LensShadingMap> STATISTICS_LENS_SHADING_CORRECTION_MAP =
3663             new Key<android.hardware.camera2.params.LensShadingMap>("android.statistics.lensShadingCorrectionMap", android.hardware.camera2.params.LensShadingMap.class);
3664 
3665     /**
3666      * <p>The shading map is a low-resolution floating-point map
3667      * that lists the coefficients used to correct for vignetting and color shading,
3668      * for each Bayer color channel of RAW image data.</p>
3669      * <p>The map provided here is the same map that is used by the camera device to
3670      * correct both color shading and vignetting for output non-RAW images.</p>
3671      * <p>When there is no lens shading correction applied to RAW
3672      * output images ({@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} <code>==</code>
3673      * false), this map is the complete lens shading correction
3674      * map; when there is some lens shading correction applied to
3675      * the RAW output image ({@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} <code>==</code> true), this map reports the remaining lens shading
3676      * correction map that needs to be applied to get shading
3677      * corrected images that match the camera device's output for
3678      * non-RAW formats.</p>
3679      * <p>For a complete shading correction map, the least shaded
3680      * section of the image will have a gain factor of 1; all
3681      * other sections will have gains above 1.</p>
3682      * <p>When {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} = TRANSFORM_MATRIX, the map
3683      * will take into account the colorCorrection settings.</p>
3684      * <p>The shading map is for the entire active pixel array, and is not
3685      * affected by the crop region specified in the request. Each shading map
3686      * entry is the value of the shading compensation map over a specific
3687      * pixel on the sensor.  Specifically, with a (N x M) resolution shading
3688      * map, and an active pixel array size (W x H), shading map entry
3689      * (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
3690      * pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
3691      * The map is assumed to be bilinearly interpolated between the sample points.</p>
3692      * <p>The channel order is [R, Geven, Godd, B], where Geven is the green
3693      * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
3694      * The shading map is stored in a fully interleaved format, and its size
3695      * is provided in the camera static metadata by android.lens.info.shadingMapSize.</p>
3696      * <p>The shading map will generally have on the order of 30-40 rows and columns,
3697      * and will be smaller than 64x64.</p>
3698      * <p>As an example, given a very small map defined as:</p>
3699      * <pre><code>android.lens.info.shadingMapSize = [ 4, 3 ]
3700      * android.statistics.lensShadingMap =
3701      * [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
3702      *     1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
3703      *   1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
3704      *     1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
3705      *   1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
3706      *     1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
3707      * </code></pre>
3708      * <p>The low-resolution scaling map images for each channel are
3709      * (displayed using nearest-neighbor interpolation):</p>
3710      * <p><img alt="Red lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
3711      * <img alt="Green (even rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
3712      * <img alt="Green (odd rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
3713      * <img alt="Blue lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
3714      * <p>As a visualization only, inverting the full-color map to recover an
3715      * image of a gray wall (using bicubic interpolation for visual quality)
3716      * as captured by the sensor gives:</p>
3717      * <p><img alt="Image of a uniform white wall (inverse shading map)" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
3718      * <p>Note that the RAW image data might be subject to lens shading
3719      * correction not reported on this map. Query
3720      * {@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} to see if RAW image data has been subject
3721      * to lens shading correction. If {@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied}
3722      * is TRUE, the RAW image data is subject to partial or full lens shading
3723      * correction. In the case that full lens shading correction is applied to RAW
3724      * images, the gain factor map reported in this key will contain all 1.0 gains.
3725      * In other words, the map reported in this key is the remaining lens shading
3726      * that needs to be applied on the RAW image to get images without lens shading
3727      * artifacts. See {@link CameraCharacteristics#REQUEST_MAX_NUM_OUTPUT_RAW android.request.maxNumOutputRaw} for a list of RAW image
3728      * formats.</p>
3729      * <p><b>Range of valid values:</b><br>
3730      * Each gain factor is &gt;= 1</p>
3731      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3732      * <p><b>Full capability</b> -
3733      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
3734      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
3735      *
3736      * @see CaptureRequest#COLOR_CORRECTION_MODE
3737      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
3738      * @see CameraCharacteristics#REQUEST_MAX_NUM_OUTPUT_RAW
3739      * @see CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED
3740      * @hide
3741      */
3742     public static final Key<float[]> STATISTICS_LENS_SHADING_MAP =
3743             new Key<float[]>("android.statistics.lensShadingMap", float[].class);
3744 
3745     /**
3746      * <p>The best-fit color channel gains calculated
3747      * by the camera device's statistics units for the current output frame.</p>
3748      * <p>This may be different than the gains used for this frame,
3749      * since statistics processing on data from a new frame
3750      * typically completes after the transform has already been
3751      * applied to that frame.</p>
3752      * <p>The 4 channel gains are defined in Bayer domain,
3753      * see {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} for details.</p>
3754      * <p>This value should always be calculated by the auto-white balance (AWB) block,
3755      * regardless of the android.control.* current values.</p>
3756      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3757      *
3758      * @see CaptureRequest#COLOR_CORRECTION_GAINS
3759      * @deprecated
3760      * @hide
3761      */
3762     @Deprecated
3763     public static final Key<float[]> STATISTICS_PREDICTED_COLOR_GAINS =
3764             new Key<float[]>("android.statistics.predictedColorGains", float[].class);
3765 
3766     /**
3767      * <p>The best-fit color transform matrix estimate
3768      * calculated by the camera device's statistics units for the current
3769      * output frame.</p>
3770      * <p>The camera device will provide the estimate from its
3771      * statistics unit on the white balance transforms to use
3772      * for the next frame. These are the values the camera device believes
3773      * are the best fit for the current output frame. This may
3774      * be different than the transform used for this frame, since
3775      * statistics processing on data from a new frame typically
3776      * completes after the transform has already been applied to
3777      * that frame.</p>
3778      * <p>These estimates must be provided for all frames, even if
3779      * capture settings and color transforms are set by the application.</p>
3780      * <p>This value should always be calculated by the auto-white balance (AWB) block,
3781      * regardless of the android.control.* current values.</p>
3782      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3783      * @deprecated
3784      * @hide
3785      */
3786     @Deprecated
3787     public static final Key<Rational[]> STATISTICS_PREDICTED_COLOR_TRANSFORM =
3788             new Key<Rational[]>("android.statistics.predictedColorTransform", Rational[].class);
3789 
3790     /**
3791      * <p>The camera device estimated scene illumination lighting
3792      * frequency.</p>
3793      * <p>Many light sources, such as most fluorescent lights, flicker at a rate
3794      * that depends on the local utility power standards. This flicker must be
3795      * accounted for by auto-exposure routines to avoid artifacts in captured images.
3796      * The camera device uses this entry to tell the application what the scene
3797      * illuminant frequency is.</p>
3798      * <p>When manual exposure control is enabled
3799      * (<code>{@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} == OFF</code> or <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} ==
3800      * OFF</code>), the {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE android.control.aeAntibandingMode} doesn't perform
3801      * antibanding, and the application can ensure it selects
3802      * exposure times that do not cause banding issues by looking
3803      * into this metadata field. See
3804      * {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE android.control.aeAntibandingMode} for more details.</p>
3805      * <p>Reports NONE if there doesn't appear to be flickering illumination.</p>
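     * <p>As a non-normative sketch, a manual exposure time (here an assumed 20 ms) can be
     * snapped to a whole number of banding periods using this report:</p>
     * <pre><code>
     * long exposureNs = 20_000_000L; // hypothetical manual exposure
     * Integer flicker = result.get(CaptureResult.STATISTICS_SCENE_FLICKER);
     * if (flicker != null) {
     *     long periodNs = 0;
     *     if (flicker == CameraMetadata.STATISTICS_SCENE_FLICKER_50HZ) {
     *         periodNs = 10_000_000L; // 100 Hz banding from 50 Hz mains
     *     } else if (flicker == CameraMetadata.STATISTICS_SCENE_FLICKER_60HZ) {
     *         periodNs = 8_333_333L;  // ~120 Hz banding from 60 Hz mains
     *     }
     *     if (periodNs != 0) {
     *         exposureNs = Math.max(periodNs, (exposureNs / periodNs) * periodNs);
     *     }
     * }
     * </code></pre>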
3806      * <p><b>Possible values:</b>
3807      * <ul>
3808      *   <li>{@link #STATISTICS_SCENE_FLICKER_NONE NONE}</li>
3809      *   <li>{@link #STATISTICS_SCENE_FLICKER_50HZ 50HZ}</li>
3810      *   <li>{@link #STATISTICS_SCENE_FLICKER_60HZ 60HZ}</li>
3811      * </ul></p>
3812      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3813      * <p><b>Full capability</b> -
3814      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
3815      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
3816      *
3817      * @see CaptureRequest#CONTROL_AE_ANTIBANDING_MODE
3818      * @see CaptureRequest#CONTROL_AE_MODE
3819      * @see CaptureRequest#CONTROL_MODE
3820      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
3821      * @see #STATISTICS_SCENE_FLICKER_NONE
3822      * @see #STATISTICS_SCENE_FLICKER_50HZ
3823      * @see #STATISTICS_SCENE_FLICKER_60HZ
3824      */
3825     @PublicKey
3826     public static final Key<Integer> STATISTICS_SCENE_FLICKER =
3827             new Key<Integer>("android.statistics.sceneFlicker", int.class);
3828 
3829     /**
3830      * <p>Operating mode for hot pixel map generation.</p>
3831      * <p>If set to <code>true</code>, a hot pixel map is returned in {@link CaptureResult#STATISTICS_HOT_PIXEL_MAP android.statistics.hotPixelMap}.
3832      * If set to <code>false</code>, no hot pixel map will be returned.</p>
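     * <p>A minimal request/result sketch:</p>
     * <pre><code>
     * builder.set(CaptureRequest.STATISTICS_HOT_PIXEL_MAP_MODE, true);
     * // ... after the capture completes:
     * Point[] hotPixels = result.get(CaptureResult.STATISTICS_HOT_PIXEL_MAP);
     * if (hotPixels != null) {
     *     for (Point p : hotPixels) {
     *         // p.x, p.y are pixel-array coordinates of a hot/defective pixel
     *     }
     * }
     * </code></pre>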
3833      * <p><b>Range of valid values:</b><br>
3834      * {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES android.statistics.info.availableHotPixelMapModes}</p>
3835      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3836      *
3837      * @see CaptureResult#STATISTICS_HOT_PIXEL_MAP
3838      * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES
3839      */
3840     @PublicKey
3841     public static final Key<Boolean> STATISTICS_HOT_PIXEL_MAP_MODE =
3842             new Key<Boolean>("android.statistics.hotPixelMapMode", boolean.class);
3843 
3844     /**
3845      * <p>List of <code>(x, y)</code> coordinates of hot/defective pixels on the sensor.</p>
3846      * <p>A coordinate <code>(x, y)</code> must lie between <code>(0, 0)</code>, and
3847      * <code>(width - 1, height - 1)</code> (inclusive), which are the top-left and
3848      * bottom-right of the pixel array, respectively. The width and
3849      * height dimensions are given in {@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize}.
3850      * This may include hot pixels that lie outside of the active array
3851      * bounds given by {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.</p>
3852      * <p><b>Range of valid values:</b><br></p>
3853      * <p>n &lt;= number of pixels on the sensor.
3854      * The <code>(x, y)</code> coordinates must be bounded by
3855      * {@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize}.</p>
3856      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3857      *
3858      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
3859      * @see CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE
3860      */
3861     @PublicKey
3862     public static final Key<android.graphics.Point[]> STATISTICS_HOT_PIXEL_MAP =
3863             new Key<android.graphics.Point[]>("android.statistics.hotPixelMap", android.graphics.Point[].class);
3864 
3865     /**
3866      * <p>Whether the camera device will output the lens
3867      * shading map in output result metadata.</p>
3868      * <p>When set to ON,
3869      * android.statistics.lensShadingMap will be provided in
3870      * the output result metadata.</p>
3871      * <p>ON is always supported on devices with the RAW capability.</p>
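     * <p>A minimal sketch of requesting the map and reading it back:</p>
     * <pre><code>
     * builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
     *         CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON);
     * // The per-frame map is then available in the capture result:
     * LensShadingMap shadingMap =
     *         result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
     * </code></pre>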
3872      * <p><b>Possible values:</b>
3873      * <ul>
3874      *   <li>{@link #STATISTICS_LENS_SHADING_MAP_MODE_OFF OFF}</li>
3875      *   <li>{@link #STATISTICS_LENS_SHADING_MAP_MODE_ON ON}</li>
3876      * </ul></p>
3877      * <p><b>Available values for this device:</b><br>
3878      * {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES android.statistics.info.availableLensShadingMapModes}</p>
3879      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3880      * <p><b>Full capability</b> -
3881      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
3882      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
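     * <p>A minimal sketch, assuming {@code builder} and {@code result} are placeholders for
     * an application-owned request builder and the corresponding capture result:</p>
     * <pre><code>builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
     *         CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON);
     * // ... submit the request, then in the capture callback:
     * android.hardware.camera2.params.LensShadingMap shadingMap =
     *         result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
     * </code></pre>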
3883      *
3884      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
3885      * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES
3886      * @see #STATISTICS_LENS_SHADING_MAP_MODE_OFF
3887      * @see #STATISTICS_LENS_SHADING_MAP_MODE_ON
3888      */
3889     @PublicKey
3890     public static final Key<Integer> STATISTICS_LENS_SHADING_MAP_MODE =
3891             new Key<Integer>("android.statistics.lensShadingMapMode", int.class);
3892 
3893     /**
3894      * <p>Tonemapping / contrast / gamma curve for the blue
3895      * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
3896      * CONTRAST_CURVE.</p>
3897      * <p>See android.tonemap.curveRed for more details.</p>
3898      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3899      * <p><b>Full capability</b> -
3900      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
3901      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
3902      *
3903      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
3904      * @see CaptureRequest#TONEMAP_MODE
3905      * @hide
3906      */
3907     public static final Key<float[]> TONEMAP_CURVE_BLUE =
3908             new Key<float[]>("android.tonemap.curveBlue", float[].class);
3909 
3910     /**
3911      * <p>Tonemapping / contrast / gamma curve for the green
3912      * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
3913      * CONTRAST_CURVE.</p>
3914      * <p>See android.tonemap.curveRed for more details.</p>
3915      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3916      * <p><b>Full capability</b> -
3917      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
3918      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
3919      *
3920      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
3921      * @see CaptureRequest#TONEMAP_MODE
3922      * @hide
3923      */
3924     public static final Key<float[]> TONEMAP_CURVE_GREEN =
3925             new Key<float[]>("android.tonemap.curveGreen", float[].class);
3926 
3927     /**
3928      * <p>Tonemapping / contrast / gamma curve for the red
3929      * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
3930      * CONTRAST_CURVE.</p>
3931      * <p>Each channel's curve is defined by an array of control points:</p>
3932      * <pre><code>android.tonemap.curveRed =
3933      *   [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ]
3934      * 2 &lt;= N &lt;= {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}</code></pre>
3935      * <p>These are sorted in order of increasing <code>Pin</code>; it is
3936      * required that input values 0.0 and 1.0 are included in the list to
3937      * define a complete mapping. For input values between control points,
3938      * the camera device must linearly interpolate between the control
3939      * points.</p>
3940      * <p>Each curve can have an independent number of points, and the number
3941      * of points can be less than max (that is, the request doesn't have to
3942      * always provide a curve with number of points equivalent to
3943      * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
3944      * <p>A few examples, and their corresponding graphical mappings; these
3945      * only specify the red channel and the precision is limited to 4
3946      * digits, for conciseness.</p>
3947      * <p>Linear mapping:</p>
3948      * <pre><code>android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ]
3949      * </code></pre>
3950      * <p><img alt="Linear mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
3951      * <p>Invert mapping:</p>
3952      * <pre><code>android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ]
3953      * </code></pre>
3954      * <p><img alt="Inverting mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
3955      * <p>Gamma 1/2.2 mapping, with 16 control points:</p>
3956      * <pre><code>android.tonemap.curveRed = [
3957      *   0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
3958      *   0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072,
3959      *   0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
3960      *   0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
3961      * </code></pre>
3962      * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
3963      * <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
3964      * <pre><code>android.tonemap.curveRed = [
3965      *   0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
3966      *   0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130,
3967      *   0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
3968      *   0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
3969      * </code></pre>
3970      * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
3971      * <p><b>Range of valid values:</b><br>
3972      * 0-1 on both input and output coordinates, normalized
3973      * as a floating-point value such that 0 == black and 1 == white.</p>
3974      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
3975      * <p><b>Full capability</b> -
3976      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
3977      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
3978      *
3979      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
3980      * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS
3981      * @see CaptureRequest#TONEMAP_MODE
3982      * @hide
3983      */
3984     public static final Key<float[]> TONEMAP_CURVE_RED =
3985             new Key<float[]>("android.tonemap.curveRed", float[].class);
3986 
3987     /**
3988      * <p>Tonemapping / contrast / gamma curve to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode}
3989      * is CONTRAST_CURVE.</p>
3990      * <p>The tonemapCurve consists of three curves, one each for the red, green, and
3991      * blue channels. The following examples use the red channel; the same logic
3992      * applies to the green and blue channels.
3993      * Each channel's curve is defined by an array of control points:</p>
3994      * <pre><code>curveRed =
3995      *   [ P0(in, out), P1(in, out), P2(in, out), P3(in, out), ..., PN(in, out) ]
3996      * 2 &lt;= N &lt;= {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}</code></pre>
3997      * <p>These are sorted in order of increasing <code>Pin</code>; it is always
3998      * guaranteed that input values 0.0 and 1.0 are included in the list to
3999      * define a complete mapping. For input values between control points,
4000      * the camera device must linearly interpolate between the control
4001      * points.</p>
4002      * <p>Each curve can have an independent number of points, and the number
4003      * of points can be less than max (that is, the request doesn't have to
4004      * always provide a curve with number of points equivalent to
4005      * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
4006      * <p>A few examples, and their corresponding graphical mappings; these
4007      * only specify the red channel and the precision is limited to 4
4008      * digits, for conciseness.</p>
4009      * <p>Linear mapping:</p>
4010      * <pre><code>curveRed = [ (0, 0), (1.0, 1.0) ]
4011      * </code></pre>
4012      * <p><img alt="Linear mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
4013      * <p>Invert mapping:</p>
4014      * <pre><code>curveRed = [ (0, 1.0), (1.0, 0) ]
4015      * </code></pre>
4016      * <p><img alt="Inverting mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
4017      * <p>Gamma 1/2.2 mapping, with 16 control points:</p>
4018      * <pre><code>curveRed = [
4019      *   (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812),
4020      *   (0.2667, 0.5484), (0.3333, 0.6069), (0.4000, 0.6594), (0.4667, 0.7072),
4021      *   (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685),
4022      *   (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ]
4023      * </code></pre>
4024      * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
4025      * <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
4026      * <pre><code>curveRed = [
4027      *   (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845),
4028      *   (0.2667, 0.5532), (0.3333, 0.6125), (0.4000, 0.6652), (0.4667, 0.7130),
4029      *   (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721),
4030      *   (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ]
4031      * </code></pre>
4032      * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
4033      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
4034      * <p><b>Full capability</b> -
4035      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
4036      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
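     * <p>A minimal sketch of supplying an application-defined linear curve, with
     * {@code builder} as a placeholder for an application-owned request builder:</p>
     * <pre><code>// Illustrative identity mapping, expressed as (Pin, Pout) pairs.
     * float[] linear = { 0.0f, 0.0f, 1.0f, 1.0f };
     * android.hardware.camera2.params.TonemapCurve curve =
     *         new android.hardware.camera2.params.TonemapCurve(linear, linear, linear);
     * builder.set(CaptureRequest.TONEMAP_MODE, CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE);
     * builder.set(CaptureRequest.TONEMAP_CURVE, curve);
     * </code></pre>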
4037      *
4038      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
4039      * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS
4040      * @see CaptureRequest#TONEMAP_MODE
4041      */
4042     @PublicKey
4043     @SyntheticKey
4044     public static final Key<android.hardware.camera2.params.TonemapCurve> TONEMAP_CURVE =
4045             new Key<android.hardware.camera2.params.TonemapCurve>("android.tonemap.curve", android.hardware.camera2.params.TonemapCurve.class);
4046 
4047     /**
4048      * <p>High-level global contrast/gamma/tonemapping control.</p>
4049      * <p>When switching to an application-defined contrast curve by setting
4050      * {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} to CONTRAST_CURVE, the curve is defined
4051      * per-channel with a set of <code>(in, out)</code> points that specify the
4052      * mapping from input high-bit-depth pixel value to the output
4053      * low-bit-depth value.  Since the actual pixel ranges of both input
4054      * and output may change depending on the camera pipeline, the values
4055      * are specified by normalized floating-point numbers.</p>
4056      * <p>More-complex color mapping operations such as 3D color look-up
4057      * tables, selective chroma enhancement, or other non-linear color
4058      * transforms will be disabled when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
4059      * CONTRAST_CURVE.</p>
4060      * <p>When using either FAST or HIGH_QUALITY, the camera device will
4061      * emit its own tonemap curve in {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}.
4062      * These values are always available, and as close as possible to the
4063      * actually used nonlinear/nonglobal transforms.</p>
4064      * <p>If a request is sent with CONTRAST_CURVE using the curve that the camera
4065      * device provided in FAST or HIGH_QUALITY mode, the image's tonemap will be
4066      * roughly the same.</p>
4067      * <p><b>Possible values:</b>
4068      * <ul>
4069      *   <li>{@link #TONEMAP_MODE_CONTRAST_CURVE CONTRAST_CURVE}</li>
4070      *   <li>{@link #TONEMAP_MODE_FAST FAST}</li>
4071      *   <li>{@link #TONEMAP_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
4072      *   <li>{@link #TONEMAP_MODE_GAMMA_VALUE GAMMA_VALUE}</li>
4073      *   <li>{@link #TONEMAP_MODE_PRESET_CURVE PRESET_CURVE}</li>
4074      * </ul></p>
4075      * <p><b>Available values for this device:</b><br>
4076      * {@link CameraCharacteristics#TONEMAP_AVAILABLE_TONE_MAP_MODES android.tonemap.availableToneMapModes}</p>
4077      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
4078      * <p><b>Full capability</b> -
4079      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
4080      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
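     * <p>A minimal sketch of requesting HIGH_QUALITY only when it is advertised, assuming
     * {@code chars} is the device's {@link CameraCharacteristics} and {@code builder} is an
     * application-owned request builder:</p>
     * <pre><code>int[] modes = chars.get(CameraCharacteristics.TONEMAP_AVAILABLE_TONE_MAP_MODES);
     * if (modes != null) {
     *     for (int mode : modes) {
     *         if (mode == CameraMetadata.TONEMAP_MODE_HIGH_QUALITY) {
     *             builder.set(CaptureRequest.TONEMAP_MODE, mode);
     *             break;
     *         }
     *     }
     * }
     * </code></pre>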
4081      *
4082      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
4083      * @see CameraCharacteristics#TONEMAP_AVAILABLE_TONE_MAP_MODES
4084      * @see CaptureRequest#TONEMAP_CURVE
4085      * @see CaptureRequest#TONEMAP_MODE
4086      * @see #TONEMAP_MODE_CONTRAST_CURVE
4087      * @see #TONEMAP_MODE_FAST
4088      * @see #TONEMAP_MODE_HIGH_QUALITY
4089      * @see #TONEMAP_MODE_GAMMA_VALUE
4090      * @see #TONEMAP_MODE_PRESET_CURVE
4091      */
4092     @PublicKey
4093     public static final Key<Integer> TONEMAP_MODE =
4094             new Key<Integer>("android.tonemap.mode", int.class);
4095 
4096     /**
4097      * <p>Tonemapping curve to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
4098      * GAMMA_VALUE</p>
4099      * <p>The tonemap curve will be defined by the following formula:
4100      * <code>OUT = pow(IN, 1.0 / gamma)</code>,
4101      * where IN and OUT are the input and output pixel values scaled to the range
4102      * [0.0, 1.0], pow is the power function, and gamma is the gamma value specified
4103      * by this key.</p>
4104      * <p>The same curve will be applied to all color channels. The camera device
4105      * may clip the input gamma value to its supported range. The actual applied
4106      * value will be returned in the capture result.</p>
4107      * <p>The valid range of gamma value varies on different devices, but values
4108      * within [1.0, 5.0] are guaranteed not to be clipped.</p>
4109      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
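     * <p>A minimal sketch, with {@code builder} and {@code result} as placeholders for an
     * application-owned request builder and the corresponding capture result:</p>
     * <pre><code>builder.set(CaptureRequest.TONEMAP_MODE, CameraMetadata.TONEMAP_MODE_GAMMA_VALUE);
     * builder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f);
     * // Later, read back the value the device actually applied (it may have been clipped).
     * Float appliedGamma = result.get(CaptureResult.TONEMAP_GAMMA);
     * </code></pre>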
4110      *
4111      * @see CaptureRequest#TONEMAP_MODE
4112      */
4113     @PublicKey
4114     public static final Key<Float> TONEMAP_GAMMA =
4115             new Key<Float>("android.tonemap.gamma", float.class);
4116 
4117     /**
4118      * <p>Tonemapping curve to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
4119      * PRESET_CURVE</p>
4120      * <p>The tonemap curve will be defined by the specified standard.</p>
4121      * <p>sRGB (approximated by 16 control points):</p>
4122      * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
4123      * <p>Rec. 709 (approximated by 16 control points):</p>
4124      * <p><img alt="Rec. 709 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png" /></p>
4125      * <p>Note that the above figures show 16-control-point approximations of the preset
4126      * curves. Camera devices may apply a different approximation to the curve.</p>
4127      * <p><b>Possible values:</b>
4128      * <ul>
4129      *   <li>{@link #TONEMAP_PRESET_CURVE_SRGB SRGB}</li>
4130      *   <li>{@link #TONEMAP_PRESET_CURVE_REC709 REC709}</li>
4131      * </ul></p>
4132      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
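     * <p>A minimal sketch of requesting the sRGB preset, with {@code builder} as a
     * placeholder for an application-owned request builder:</p>
     * <pre><code>builder.set(CaptureRequest.TONEMAP_MODE, CameraMetadata.TONEMAP_MODE_PRESET_CURVE);
     * builder.set(CaptureRequest.TONEMAP_PRESET_CURVE, CameraMetadata.TONEMAP_PRESET_CURVE_SRGB);
     * </code></pre>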
4133      *
4134      * @see CaptureRequest#TONEMAP_MODE
4135      * @see #TONEMAP_PRESET_CURVE_SRGB
4136      * @see #TONEMAP_PRESET_CURVE_REC709
4137      */
4138     @PublicKey
4139     public static final Key<Integer> TONEMAP_PRESET_CURVE =
4140             new Key<Integer>("android.tonemap.presetCurve", int.class);
4141 
4142     /**
4143      * <p>This LED is nominally used to indicate to the user
4144      * that the camera is powered on and may be streaming images back to the
4145      * Application Processor. In certain rare circumstances, the OS may
4146      * disable this when video is processed locally and not transmitted to
4147      * any untrusted applications.</p>
4148      * <p>In particular, the LED <em>must</em> always be on when the data could be
4149      * transmitted off the device. The LED <em>should</em> always be on whenever
4150      * data is stored locally on the device.</p>
4151      * <p>The LED <em>may</em> be off if a trusted application is using the data that
4152      * doesn't violate the above rules.</p>
4153      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
4154      * @hide
4155      */
4156     public static final Key<Boolean> LED_TRANSMIT =
4157             new Key<Boolean>("android.led.transmit", boolean.class);
4158 
4159     /**
4160      * <p>Whether black-level compensation is locked
4161      * to its current values, or is free to vary.</p>
4162      * <p>Whether the black level offset was locked for this frame.  Should be
4163      * ON if {@link CaptureRequest#BLACK_LEVEL_LOCK android.blackLevel.lock} was ON in the capture request, unless
4164      * a change in other capture settings forced the camera device to
4165      * perform a black level reset.</p>
4166      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
4167      * <p><b>Full capability</b> -
4168      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
4169      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
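     * <p>A minimal sketch, with {@code builder} and {@code result} as placeholders for an
     * application-owned request builder and the corresponding capture result:</p>
     * <pre><code>builder.set(CaptureRequest.BLACK_LEVEL_LOCK, true);
     * // In the capture callback, check whether the lock actually held for this frame.
     * Boolean locked = result.get(CaptureResult.BLACK_LEVEL_LOCK);
     * boolean blackLevelStable = (locked != null &amp;&amp; locked);
     * </code></pre>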
4170      *
4171      * @see CaptureRequest#BLACK_LEVEL_LOCK
4172      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
4173      */
4174     @PublicKey
4175     public static final Key<Boolean> BLACK_LEVEL_LOCK =
4176             new Key<Boolean>("android.blackLevel.lock", boolean.class);
4177 
4178     /**
4179      * <p>The frame number corresponding to the last request
4180      * with which the output result (metadata + buffers) has been fully
4181      * synchronized.</p>
4182      * <p>When a request is submitted to the camera device, there is usually a
4183      * delay of several frames before the controls get applied. A camera
4184      * device may either choose to account for this delay by implementing a
4185      * pipeline and carefully submit well-timed atomic control updates, or
4186      * it may start streaming control changes that span over several frame
4187      * boundaries.</p>
4188      * <p>In the latter case, whenever a request's settings change relative to
4189      * the previous submitted request, the full set of changes may take
4190      * multiple frame durations to fully take effect. Some settings may
4191      * take effect sooner (in less frame durations) than others.</p>
4192      * <p>While a set of control changes are being propagated, this value
4193      * will be CONVERGING.</p>
4194      * <p>Once it is fully known that a set of control changes have finished
4195      * propagating, and the resulting updated control settings
4196      * have been read back by the camera device, this value will be set
4197      * to a non-negative frame number (corresponding to the request to
4198      * which the results have synchronized).</p>
4199      * <p>Older camera device implementations may not have a way to detect
4200      * when all camera controls have been applied, and will always set this
4201      * value to UNKNOWN.</p>
4202      * <p>FULL capability devices will always have this value set to the
4203      * frame number of the request corresponding to this result.</p>
4204      * <p><em>Further details</em>:</p>
4205      * <ul>
4206      * <li>Whenever a request differs from the last request, any future
4207      * results not yet returned may have this value set to CONVERGING (this
4208      * could include any in-progress captures not yet returned by the camera
4209      * device, for more details see pipeline considerations below).</li>
4210      * <li>Submitting a series of multiple requests that differ from the
4211      * previous request (e.g. r1, r2, r3 s.t. r1 != r2 != r3)
4212      * moves the new synchronization frame to the last non-repeating
4213      * request (using the smallest frame number from the contiguous list of
4214      * repeating requests).</li>
4215      * <li>Submitting the same request repeatedly will not change this value
4216      * to CONVERGING, if it was already a non-negative value.</li>
4217      * <li>When this value changes to non-negative, that means that all of the
4218      * metadata controls from the request have been applied, all of the
4219      * metadata controls from the camera device have been read to the
4220      * updated values (into the result), and all of the graphics buffers
4221      * corresponding to this result are also synchronized to the request.</li>
4222      * </ul>
4223      * <p><em>Pipeline considerations</em>:</p>
4224      * <p>Submitting a request with updated controls relative to the previously
4225      * submitted requests may also invalidate the synchronization state
4226      * of all the results corresponding to currently in-flight requests.</p>
4227      * <p>In other words, results for this current request and up to
4228      * {@link CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH android.request.pipelineMaxDepth} prior requests may have their
4229      * android.sync.frameNumber change to CONVERGING.</p>
4230      * <p><b>Possible values:</b>
4231      * <ul>
4232      *   <li>{@link #SYNC_FRAME_NUMBER_CONVERGING CONVERGING}</li>
4233      *   <li>{@link #SYNC_FRAME_NUMBER_UNKNOWN UNKNOWN}</li>
4234      * </ul></p>
4235      * <p><b>Available values for this device:</b><br>
4236      * Either a non-negative value corresponding to a
4237      * <code>frame_number</code>, or one of the two enums (CONVERGING / UNKNOWN).</p>
4238      * <p>This key is available on all devices.</p>
4239      *
4240      * @see CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH
4241      * @see #SYNC_FRAME_NUMBER_CONVERGING
4242      * @see #SYNC_FRAME_NUMBER_UNKNOWN
4243      * @hide
4244      */
4245     public static final Key<Long> SYNC_FRAME_NUMBER =
4246             new Key<Long>("android.sync.frameNumber", long.class);
4247 
4248     /**
4249      * <p>The exposure time increase factor applied to the original output
4250      * frame by the application's processing before sending it for reprocessing.</p>
4251      * <p>This is optional, and will be supported if the camera device supports YUV_REPROCESSING
4252      * capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains YUV_REPROCESSING).</p>
4253      * <p>For some YUV reprocessing use cases, the application may choose to filter the original
4254      * output frames to effectively reduce the noise to the same level as a frame that was
4255      * captured with longer exposure time. To be more specific, assuming the original captured
4256      * images were captured with a sensitivity of S and an exposure time of T, the model in
4257      * the camera device is that the amount of noise in the image would be approximately what
4258      * would be expected if the original capture parameters had been a sensitivity of
4259      * S/effectiveExposureFactor and an exposure time of T*effectiveExposureFactor, rather
4260      * than S and T respectively. If the captured images were processed by the application
4261      * before being sent for reprocessing, then the application may have used image processing
4262      * algorithms and/or multi-frame image fusion to reduce the noise in the
4263      * application-processed images (input images). By using the effectiveExposureFactor
4264      * control, the application can communicate to the camera device the actual noise level
4265      * improvement in the application-processed image. With this information, the camera
4266      * device can select appropriate noise reduction and edge enhancement parameters to avoid
4267      * excessive noise reduction ({@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode}) and insufficient edge
4268      * enhancement ({@link CaptureRequest#EDGE_MODE android.edge.mode}) being applied to the reprocessed frames.</p>
4269      * <p>For example, for the multi-frame image fusion use case, the application may fuse
4270      * multiple output frames together into a final frame for reprocessing. When N images are
4271      * fused into 1 image for reprocessing, the exposure time increase factor could be up to
4272      * the square root of N (based on a simple photon shot noise model). The camera device will
4273      * adjust the reprocessing noise reduction and edge enhancement parameters accordingly to
4274      * produce the best quality images.</p>
4275      * <p>This is a relative factor; 1.0 indicates that the application hasn't processed the input
4276      * buffer in a way that affects its effective exposure time.</p>
4277      * <p>This control is only effective for YUV reprocessing capture requests. For noise
4278      * reduction reprocessing, it is only effective when <code>{@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode} != OFF</code>.
4279      * Similarly, for edge enhancement reprocessing, it is only effective when
4280      * <code>{@link CaptureRequest#EDGE_MODE android.edge.mode} != OFF</code>.</p>
4281      * <p><b>Units</b>: Relative exposure time increase factor.</p>
4282      * <p><b>Range of valid values:</b><br>
4283      * &gt;= 1.0</p>
4284      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
4285      * <p><b>Limited capability</b> -
4286      * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
4287      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
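     * <p>A minimal sketch for an assumed N-frame fusion step, where {@code reprocessBuilder}
     * and {@code fusedFrameCount} are placeholders for an application-created reprocess
     * request builder and the number of frames that were merged:</p>
     * <pre><code>// Photon-shot-noise model from above: fusing N frames behaves like roughly
     * // sqrt(N) times the exposure time of a single frame.
     * float factor = (float) Math.sqrt(fusedFrameCount);
     * reprocessBuilder.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, factor);
     * </code></pre>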
4288      *
4289      * @see CaptureRequest#EDGE_MODE
4290      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
4291      * @see CaptureRequest#NOISE_REDUCTION_MODE
4292      * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
4293      */
4294     @PublicKey
4295     public static final Key<Float> REPROCESS_EFFECTIVE_EXPOSURE_FACTOR =
4296             new Key<Float>("android.reprocess.effectiveExposureFactor", float.class);
4297 
4298     /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
4299      * End generated code
4300      *~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~O@*/
4301 
4302 
4303 
4304 }
4305