• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 *     * Redistributions of source code must retain the above copyright
7 *       notice, this list of conditions and the following disclaimer.
8 *     * Redistributions in binary form must reproduce the above
9 *       copyright notice, this list of conditions and the following
10 *       disclaimer in the documentation and/or other materials provided
11 *       with the distribution.
12 *     * Neither the name of The Linux Foundation nor the names of its
13 *       contributors may be used to endorse or promote products derived
14 *       from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29 
30 #define LOG_TAG "QCamera3HWI"
31 //#define LOG_NDEBUG 0
32 
33 #define __STDC_LIMIT_MACROS
34 
35 // To remove
36 #include <cutils/properties.h>
37 
38 // System dependencies
39 #include <dlfcn.h>
40 #include <fcntl.h>
41 #include <stdio.h>
42 #include <stdlib.h>
43 #include "utils/Timers.h"
44 #include "sys/ioctl.h"
45 #include <time.h>
46 #include <sync/sync.h>
47 #include "gralloc_priv.h"
48 #include <map>
49 #include <unordered_map>
50 
51 // Display dependencies
52 #include "qdMetaData.h"
53 
54 // Camera dependencies
55 #include "android/QCamera3External.h"
56 #include "util/QCameraFlash.h"
57 #include "QCamera3HWI.h"
58 #include "QCamera3VendorTags.h"
59 #include "QCameraTrace.h"
60 
61 // XML parsing
62 #include "tinyxml2.h"
63 
64 #include "HdrPlusClientUtils.h"
65 
66 extern "C" {
67 #include "mm_camera_dbg.h"
68 }
69 #include "cam_cond.h"
70 
71 using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
72 using namespace android;
73 
74 namespace qcamera {
75 
// Convenience accessor for a buffer pointer inside a camera memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Frames of pipeline delay reported before the first result is available.
#define EMPTY_PIPELINE_DELAY 2
// mm_camera has 2 partial results: 3A, and final result.
// HDR+ requests have 3 partial results: postview, next request ready, and final result.
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY     0

// Maximum representable values for common sensor bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for which EIS (electronic image stabilization) applies.
#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

// Per-configuration stream-count limits enforced by this HAL.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
// Metering/face regions are serialized as (xmin, ymin, xmax, ymax, weight).
#define REGIONS_TUPLE_COUNT    5
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 10
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
// Element count of a fixed-size array (classic sizeof idiom).
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Post-processing feature superset advertised for HAL3 streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT              0
#define FACE_TOP               1
#define FACE_RIGHT             2
#define FACE_BOTTOM            3
#define FACE_WEIGHT            4

/* Face landmarks indices */
#define LEFT_EYE_X             0
#define LEFT_EYE_Y             1
#define RIGHT_EYE_X            2
#define RIGHT_EYE_Y            3
#define MOUTH_X                4
#define MOUTH_Y                5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT
143 
// Per-camera capability table and cached static metadata, indexed by camera
// id (see usages such as gCamCapability[cameraId] in the constructor below).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
// HAL3 debug log verbosity; initialised to 1, refreshed via getLogLevel().
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
int32_t gActiveEaselClient = 0; // The number of active cameras on Easel.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
// (Namespace-scope bool, so it is zero-initialised to false by default.)
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.
165 
166 
// Maps the CDS property strings ("On"/"Off"/"Auto") to backend CDS modes.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
// Vendor video-HDR enum -> backend video-HDR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

// Vendor binning-correction enum -> backend binning-correction mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

// Vendor IR enum -> backend IR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

// android.control.effectMode -> backend effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
207 
// android.control.awbMode -> backend white-balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// android.control.sceneMode -> backend scene mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

// android.control.afMode -> backend focus mode.
// Note: ANDROID_CONTROL_AF_MODE_OFF is deliberately paired with both
// CAM_FOCUS_MODE_OFF and CAM_FOCUS_MODE_FIXED, so either backend mode
// reports back to the framework as AF off.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
255 
// android.colorCorrection.aberrationMode -> backend CAC mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// android.control.aeAntibandingMode -> backend antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// android.control.aeMode -> backend flash mode implied by that AE mode.
// Both AUTO_FLASH and AUTO_FLASH_REDEYE map to CAM_FLASH_MODE_AUTO.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

// android.flash.mode -> backend flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// android.statistics.faceDetectMode -> backend face-detect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// android.lens.info.focusDistanceCalibration -> backend calibration level.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// android.lens.state -> backend lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};

// Flattened (width, height) pairs of supported JPEG thumbnail sizes.
// The leading 0, 0 pair denotes "no thumbnail", per the Android
// ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES contract.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};
329 
// android.sensor.testPatternMode -> backend test-pattern mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested FPS -> backend high-frame-rate mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

// Vendor instant-AEC enum -> backend AEC convergence type.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

// Vendor exposure-metering enum -> backend AEC metering mode.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

// Vendor ISO enum -> backend ISO mode.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};
411 
// Framework-facing camera3 device ops table; each entry forwards to the
// corresponding static trampoline in QCamera3HardwareInterface. Entries this
// HAL does not implement (register_stream_buffers, get_metadata_vendor_tag_ops)
// are left NULL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
423 
// Key describing one stream configuration as four int32 fields.
typedef std::tuple<int32_t, int32_t, int32_t, int32_t> config_entry;

// Two configuration entries are equal only when all four fields match.
bool operator == (const config_entry & lhs, const config_entry & rhs) {
    if (std::get<0>(lhs) != std::get<0>(rhs)) return false;
    if (std::get<1>(lhs) != std::get<1>(rhs)) return false;
    if (std::get<2>(lhs) != std::get<2>(rhs)) return false;
    return std::get<3>(lhs) == std::get<3>(rhs);
}

// Hash functor for config_entry: 31-based polynomial combination of the
// four per-field hashes, seeded with 1.
struct ConfigEntryHash {
    std::size_t operator() (config_entry const& entry) const {
        const std::size_t kMultiplier = 31;
        std::size_t combined = 1;
        combined = combined * kMultiplier + std::hash<int> {} (std::get<0>(entry));
        combined = combined * kMultiplier + std::hash<int> {} (std::get<1>(entry));
        combined = combined * kMultiplier + std::hash<int> {} (std::get<2>(entry));
        combined = combined * kMultiplier + std::hash<int> {} (std::get<3>(entry));
        return combined;
    }
};
444 
// Backend session ids, indexed by camera id (see sessionId[mLinkedCameraId]
// in the dual-camera unlink path). Initialised to a 0xDEADBEEF sentinel so
// an unset slot is recognisable.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
447 
logEaselEvent(const char * tag,const char * event)448 static inline void logEaselEvent(const char *tag, const char *event) {
449     if (CC_UNLIKELY(gEaselProfilingEnabled)) {
450         struct timespec ts = {};
451         static int64_t kMsPerSec = 1000;
452         static int64_t kNsPerMs = 1000000;
453         status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
454         if (res != OK) {
455             ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
456         } else {
457             int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
458             ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
459         }
460     }
461 }
462 
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes all
 *              members to closed/idle defaults, fills in the camera3_device_t
 *              vtable handed to the framework, and reads the persist.camera.*
 *              system properties that configure debug/TNR/AV-timer behavior.
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : framework callback table (stored, not invoked here)
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_ISTypeVideo(IS_TYPE_NONE),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_bAVTimerEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mLastRequestedFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF),
      mLastRequestedOisDataMode(ANDROID_STATISTICS_OIS_DATA_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mEaselMipiStarted(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger(),
      mSceneDistance(-1),
      mLastFocusDistance(0.0)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    // Fill in the camera3_device_t the framework sees: API version (3.5 or
    // 3.3 depending on build), close hook, ops vtable, and a back-pointer
    // to this instance for the static trampolines.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_5;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));
    mEaselFwUpdated = false;

    // TNR (temporal noise reduction) toggles: off for preview by default,
    // on for SW-TNR preview and for video.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    // Queries the GPU's pixel alignment to pick the surface stride padding;
    // falls back to CAM_PAD_TO_64 when the probe is compiled out or fails.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }
#endif
    // PDAF support: getPDStatIndex returns a negative index when the
    // capability table exposes no PD statistics stream.
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}
648 
649 /*===========================================================================
650  * FUNCTION   : ~QCamera3HardwareInterface
651  *
652  * DESCRIPTION: destructor of QCamera3HardwareInterface
653  *
654  * PARAMETERS : none
655  *
656  * RETURN     : none
657  *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Clean up Easel error future first to avoid Easel error happens during destructor.
    cleanupEaselErrorFuture();

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // Close HDR+ client first before destroying HAL.
    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }
    }

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        // Build the "sync off" bundle command in the shared dual-cam buffer
        // and send it to the backend before tearing the session down.
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        // gCamLock guards the global sessionId[] table read below.
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            // Unlink failure is not fatal for teardown; continue closing.
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        stopChannelLocked(/*stop_immediately*/false);
    }

    // Delete phase: all channels are stopped above, so it is now safe to
    // destroy them and release the per-stream bookkeeping entries.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    // These point at channels owned via mStreamInfo (already deleted above),
    // so only the pointers are reset here — no delete.
    mPictureChannel = NULL;
    mDepthChannel = NULL;

    // Metadata channel is deleted last among the channels.
    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            // Inner rc intentionally shadows the outer one; this failure is
            // only logged, not propagated.
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    // closeCamera() releases the camera handle, flash and Easel resources;
    // skip it if close() was already called explicitly.
    if (mState != CLOSED)
        closeCamera();

    // Drop all pending request/buffer bookkeeping and cached templates.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
834 
835 /*===========================================================================
836  * FUNCTION   : erasePendingRequest
837  *
838  * DESCRIPTION: function to erase a desired pending request after freeing any
839  *              allocated memory
840  *
841  * PARAMETERS :
842  *   @i       : iterator pointing to pending request to be erased
843  *
844  * RETURN     : iterator pointing to the next request
845  *==========================================================================*/
846 QCamera3HardwareInterface::pendingRequestIterator
erasePendingRequest(pendingRequestIterator i)847         QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
848 {
849     if (i->input_buffer != NULL) {
850         free(i->input_buffer);
851         i->input_buffer = NULL;
852     }
853     if (i->settings != NULL)
854         free_camera_metadata((camera_metadata_t*)i->settings);
855 
856     mExpectedInflightDuration -= i->expectedFrameDuration;
857     if (mExpectedInflightDuration < 0) {
858         LOGE("Negative expected in-flight duration!");
859         mExpectedInflightDuration = 0;
860     }
861 
862     return mPendingRequestsList.erase(i);
863 }
864 
865 /*===========================================================================
866  * FUNCTION   : camEvtHandle
867  *
868  * DESCRIPTION: Function registered to mm-camera-interface to handle events
869  *
870  * PARAMETERS :
871  *   @camera_handle : interface layer camera handle
872  *   @evt           : ptr to event
873  *   @user_data     : user data ptr
874  *
875  * RETURN     : none
876  *==========================================================================*/
camEvtHandle(uint32_t,mm_camera_event_t * evt,void * user_data)877 void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
878                                           mm_camera_event_t *evt,
879                                           void *user_data)
880 {
881     QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
882     if (obj && evt) {
883         switch(evt->server_event_type) {
884             case CAM_EVENT_TYPE_DAEMON_DIED:
885                 pthread_mutex_lock(&obj->mMutex);
886                 obj->mState = ERROR;
887                 pthread_mutex_unlock(&obj->mMutex);
888                 LOGE("Fatal, camera daemon died");
889                 break;
890 
891             case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
892                 LOGD("HAL got request pull from Daemon");
893                 pthread_mutex_lock(&obj->mMutex);
894                 obj->mWokenUpByDaemon = true;
895                 obj->unblockRequestIfNecessary();
896                 pthread_mutex_unlock(&obj->mMutex);
897                 break;
898 
899             default:
900                 LOGW("Warning: Unhandled event %d",
901                         evt->server_event_type);
902                 break;
903         }
904     } else {
905         LOGE("NULL user_data/evt");
906     }
907 }
908 
909 /*===========================================================================
910  * FUNCTION   : openCamera
911  *
912  * DESCRIPTION: open camera
913  *
914  * PARAMETERS :
915  *   @hw_device  : double ptr for camera device struct
916  *
917  * RETURN     : int32_t type of status
918  *              NO_ERROR  -- success
919  *              none-zero failure code
920  *==========================================================================*/
openCamera(struct hw_device_t ** hw_device)921 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
922 {
923     int rc = 0;
924     if (mState != CLOSED) {
925         *hw_device = NULL;
926         return PERMISSION_DENIED;
927     }
928 
929     logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
930     mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
931     LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
932              mCameraId);
933 
934     if (mCameraHandle) {
935         LOGE("Failure: Camera already opened");
936         return ALREADY_EXISTS;
937     }
938 
939     {
940         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
941         if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
942             logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
943             if (gActiveEaselClient == 0) {
944                 rc = gEaselManagerClient->resume(this);
945                 if (rc != 0) {
946                     ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
947                     return rc;
948                 }
949                 mEaselFwUpdated = false;
950             }
951             gActiveEaselClient++;
952 
953             mQCamera3HdrPlusListenerThread = new QCamera3HdrPlusListenerThread(this);
954             rc = mQCamera3HdrPlusListenerThread->run("QCamera3HdrPlusListenerThread");
955             if (rc != OK) {
956                 ALOGE("%s: Starting HDR+ client listener thread failed: %s (%d)", __FUNCTION__,
957                         strerror(-rc), rc);
958                 return rc;
959             }
960         }
961     }
962 
963     rc = openCamera();
964     if (rc == 0) {
965         *hw_device = &mCameraDevice.common;
966     } else {
967         *hw_device = NULL;
968 
969         // Suspend Easel because opening camera failed.
970         {
971             std::unique_lock<std::mutex> l(gHdrPlusClientLock);
972             if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
973                 if (gActiveEaselClient == 1) {
974                     status_t suspendErr = gEaselManagerClient->suspend();
975                     if (suspendErr != 0) {
976                         ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
977                                 strerror(-suspendErr), suspendErr);
978                     }
979                 }
980                 gActiveEaselClient--;
981             }
982 
983             if (mQCamera3HdrPlusListenerThread != nullptr) {
984                 mQCamera3HdrPlusListenerThread->requestExit();
985                 mQCamera3HdrPlusListenerThread->join();
986                 mQCamera3HdrPlusListenerThread = nullptr;
987             }
988         }
989     }
990 
991     LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
992              mCameraId, rc);
993 
994     if (rc == NO_ERROR) {
995         mState = OPENED;
996     }
997 
998     return rc;
999 }
1000 
1001 /*===========================================================================
1002  * FUNCTION   : openCamera
1003  *
1004  * DESCRIPTION: open camera
1005  *
1006  * PARAMETERS : none
1007  *
1008  * RETURN     : int32_t type of status
1009  *              NO_ERROR  -- success
1010  *              none-zero failure code
1011  *==========================================================================*/
openCamera()1012 int QCamera3HardwareInterface::openCamera()
1013 {
1014     int rc = 0;
1015     char value[PROPERTY_VALUE_MAX];
1016 
1017     KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
1018 
1019     rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
1020     if (rc < 0) {
1021         LOGE("Failed to reserve flash for camera id: %d",
1022                 mCameraId);
1023         return UNKNOWN_ERROR;
1024     }
1025 
1026     rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
1027     if (rc) {
1028         LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
1029         return rc;
1030     }
1031 
1032     if (!mCameraHandle) {
1033         LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
1034         return -ENODEV;
1035     }
1036 
1037     rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
1038             camEvtHandle, (void *)this);
1039 
1040     if (rc < 0) {
1041         LOGE("Error, failed to register event callback");
1042         /* Not closing camera here since it is already handled in destructor */
1043         return FAILED_TRANSACTION;
1044     }
1045 
1046     mExifParams.debug_params =
1047             (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
1048     if (mExifParams.debug_params) {
1049         memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
1050     } else {
1051         LOGE("Out of Memory. Allocation failed for 3A debug exif params");
1052         return NO_MEMORY;
1053     }
1054     mFirstConfiguration = true;
1055 
1056     //Notify display HAL that a camera session is active.
1057     //But avoid calling the same during bootup because camera service might open/close
1058     //cameras at boot time during its initialization and display service will also internally
1059     //wait for camera service to initialize first while calling this display API, resulting in a
1060     //deadlock situation. Since boot time camera open/close calls are made only to fetch
1061     //capabilities, no need of this display bw optimization.
1062     //Use "service.bootanim.exit" property to know boot status.
1063     property_get("service.bootanim.exit", value, "0");
1064     if (atoi(value) == 1) {
1065         pthread_mutex_lock(&gCamLock);
1066         if (gNumCameraSessions++ == 0) {
1067             setCameraLaunchStatus(true);
1068         }
1069         pthread_mutex_unlock(&gCamLock);
1070     }
1071 
1072     //fill the session id needed while linking dual cam
1073     pthread_mutex_lock(&gCamLock);
1074     rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
1075         &sessionId[mCameraId]);
1076     pthread_mutex_unlock(&gCamLock);
1077 
1078     if (rc < 0) {
1079         LOGE("Error, failed to get sessiion id");
1080         return UNKNOWN_ERROR;
1081     } else {
1082         //Allocate related cam sync buffer
1083         //this is needed for the payload that goes along with bundling cmd for related
1084         //camera use cases
1085         m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
1086         rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
1087         if(rc != OK) {
1088             rc = NO_MEMORY;
1089             LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
1090             return NO_MEMORY;
1091         }
1092 
1093         //Map memory for related cam sync buffer
1094         rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1095                 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
1096                 m_pDualCamCmdHeap->getFd(0),
1097                 sizeof(cam_dual_camera_cmd_info_t),
1098                 m_pDualCamCmdHeap->getPtr(0));
1099         if(rc < 0) {
1100             LOGE("Dualcam: failed to map Related cam sync buffer");
1101             rc = FAILED_TRANSACTION;
1102             return NO_MEMORY;
1103         }
1104         m_pDualCamCmdPtr =
1105                 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
1106     }
1107 
1108     LOGH("mCameraId=%d",mCameraId);
1109 
1110     return NO_ERROR;
1111 }
1112 
1113 /*===========================================================================
1114  * FUNCTION   : closeCamera
1115  *
1116  * DESCRIPTION: close camera
1117  *
1118  * PARAMETERS : none
1119  *
1120  * RETURN     : int32_t type of status
1121  *              NO_ERROR  -- success
1122  *              none-zero failure code
1123  *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
             mCameraId);

    // unmap memory for related cam sync buffer
    // NOTE(review): mCameraHandle is dereferenced without a NULL check here;
    // callers presumably guarantee the camera is open — confirm.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        // Only the last closing session clears the launch status.
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // Free the 3A debug EXIF scratch buffer allocated in openCamera().
    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    // Hand the flash unit back to the torch HAL; failure is non-fatal.
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
         mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (EaselManagerClientOpened) {
            // Suspend Easel only when this is the last active client.
            if (gActiveEaselClient == 1) {
                rc = gEaselManagerClient->suspend();
                if (rc != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                }
            }
            gActiveEaselClient--;
        }

        // Shut down the HDR+ listener thread started in openCamera().
        if (mQCamera3HdrPlusListenerThread != nullptr) {
            mQCamera3HdrPlusListenerThread->requestExit();
            mQCamera3HdrPlusListenerThread->join();
            mQCamera3HdrPlusListenerThread = nullptr;
        }
    }

    return rc;
}
1196 
1197 /*===========================================================================
1198  * FUNCTION   : initialize
1199  *
1200  * DESCRIPTION: Initialize frameworks callback functions
1201  *
1202  * PARAMETERS :
1203  *   @callback_ops : callback function to frameworks
1204  *
1205  * RETURN     :
1206  *
1207  *==========================================================================*/
initialize(const struct camera3_callback_ops * callback_ops)1208 int QCamera3HardwareInterface::initialize(
1209         const struct camera3_callback_ops *callback_ops)
1210 {
1211     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
1212     int rc;
1213 
1214     LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1215     pthread_mutex_lock(&mMutex);
1216 
1217     // Validate current state
1218     switch (mState) {
1219         case OPENED:
1220             /* valid state */
1221             break;
1222         default:
1223             LOGE("Invalid state %d", mState);
1224             rc = -ENODEV;
1225             goto err1;
1226     }
1227 
1228     rc = initParameters();
1229     if (rc < 0) {
1230         LOGE("initParamters failed %d", rc);
1231         goto err1;
1232     }
1233     mCallbackOps = callback_ops;
1234 
1235     mChannelHandle = mCameraHandle->ops->add_channel(
1236             mCameraHandle->camera_handle, NULL, NULL, this);
1237     if (mChannelHandle == 0) {
1238         LOGE("add_channel failed");
1239         rc = -ENOMEM;
1240         pthread_mutex_unlock(&mMutex);
1241         return rc;
1242     }
1243 
1244     pthread_mutex_unlock(&mMutex);
1245     mCameraInitialized = true;
1246     mState = INITIALIZED;
1247     LOGI("X");
1248     return 0;
1249 
1250 err1:
1251     pthread_mutex_unlock(&mMutex);
1252     return rc;
1253 }
1254 
1255 /*===========================================================================
1256  * FUNCTION   : validateStreamDimensions
1257  *
1258  * DESCRIPTION: Check if the configuration requested are those advertised
1259  *
1260  * PARAMETERS :
1261  *   @cameraId : cameraId
1262  *   @stream_list : streams to be configured
1263  *
1264  * RETURN     :
1265  *
1266  *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(uint32_t cameraId,
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    // Look up the PDAF stat plane; a non-negative index means PD depth
    // streams are supported and their fixed dimensions are taken from
    // the raw metadata table.
    auto pDIndex = getPDStatIndex(gCamCapability[cameraId]);
    bool pDSupported = (0 <= pDIndex) ? true : false;
    if (pDSupported) {
        depthWidth = gCamCapability[cameraId]->raw_meta_dim[pDIndex].width;
        depthHeight = gCamCapability[cameraId]->raw_meta_dim[pDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                // At most one input stream is allowed per configuration.
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // 90/270 degree rotations swap width and height before validating
        // against the (unrotated) capability tables.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW16 depth streams must exactly match the PD stat plane size.
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    pDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            // Ordinary RAW streams validate against the supported raw dims.
            count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[cameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[cameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    pDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[cameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_Y8:
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL/bidirectional/input streams may be full active array size.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[cameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[cameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spc if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[cameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[cameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[cameraId]->active_array_size.width,
                    gCamCapability[cameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1406 
1407 /*===========================================================================
1408  * FUNCTION   : validateUsageFlags
1409  *
1410  * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1411  *
1412  * PARAMETERS :
1413  *   @cameraId : camera id.
1414  *   @stream_list : streams to be configured
1415  *
1416  * RETURN     :
1417  *   NO_ERROR if the usage flags are supported
1418  *   error code if usage flags are not supported
1419  *
1420  *==========================================================================*/
validateUsageFlags(uint32_t cameraId,const camera3_stream_configuration_t * streamList)1421 int QCamera3HardwareInterface::validateUsageFlags(uint32_t cameraId,
1422         const camera3_stream_configuration_t* streamList)
1423 {
1424     for (size_t j = 0; j < streamList->num_streams; j++) {
1425         const camera3_stream_t *newStream = streamList->streams[j];
1426 
1427         if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1428             (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1429              newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1430             continue;
1431         }
1432 
1433         // Here we only care whether it's EIS3 or not
1434         char is_type_value[PROPERTY_VALUE_MAX];
1435         property_get("persist.camera.is_type", is_type_value, "4");
1436         cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1437         if (gCamCapability[cameraId]->position == CAM_POSITION_FRONT ||
1438                 streamList->operation_mode ==
1439                 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1440             isType = IS_TYPE_NONE;
1441 
1442         bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1443         bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1444         bool isZSL = IS_USAGE_ZSL(newStream->usage);
1445         bool forcePreviewUBWC = true;
1446         if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1447             forcePreviewUBWC = false;
1448         }
1449         cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1450                 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
1451         cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1452                 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
1453         cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1454                 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
1455 
1456         // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1457         // So color spaces will always match.
1458 
1459         // Check whether underlying formats of shared streams match.
1460         if (isVideo && isPreview && videoFormat != previewFormat) {
1461             LOGE("Combined video and preview usage flag is not supported");
1462             return -EINVAL;
1463         }
1464         if (isPreview && isZSL && previewFormat != zslFormat) {
1465             LOGE("Combined preview and zsl usage flag is not supported");
1466             return -EINVAL;
1467         }
1468         if (isVideo && isZSL && videoFormat != zslFormat) {
1469             LOGE("Combined video and zsl usage flag is not supported");
1470             return -EINVAL;
1471         }
1472     }
1473     return NO_ERROR;
1474 }
1475 
1476 /*===========================================================================
1477  * FUNCTION   : validateUsageFlagsForEis
1478  *
1479  * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1480  *
1481  * PARAMETERS :
1482  *   @bEisEnable : Flag indicated that EIS is enabled.
1483  *   @bEisSupportedSize : Flag indicating that there is a preview/video stream
1484  *                        within the EIS supported size.
1485  *   @stream_list : streams to be configured
1486  *
1487  * RETURN     :
1488  *   NO_ERROR if the usage flags are supported
1489  *   error code if usage flags are not supported
1490  *
1491  *==========================================================================*/
validateUsageFlagsForEis(bool bEisEnable,bool bEisSupportedSize,const camera3_stream_configuration_t * streamList)1492 int QCamera3HardwareInterface::validateUsageFlagsForEis(bool bEisEnable, bool bEisSupportedSize,
1493         const camera3_stream_configuration_t* streamList)
1494 {
1495     for (size_t j = 0; j < streamList->num_streams; j++) {
1496         const camera3_stream_t *newStream = streamList->streams[j];
1497 
1498         bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1499         bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1500 
1501         // Because EIS is "hard-coded" for certain use case, and current
1502        // implementation doesn't support shared preview and video on the same
1503         // stream, return failure if EIS is forced on.
1504         if (isPreview && isVideo && bEisEnable && bEisSupportedSize) {
1505             LOGE("Combined video and preview usage flag is not supported due to EIS");
1506             return -EINVAL;
1507         }
1508     }
1509     return NO_ERROR;
1510 }
1511 
1512 /*==============================================================================
1513  * FUNCTION   : isSupportChannelNeeded
1514  *
1515  * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1516  *
1517  * PARAMETERS :
1518  *   @stream_list : streams to be configured
1519  *   @stream_config_info : the config info for streams to be configured
1520  *
 * RETURN     : Boolean true/false decision
1522  *
1523  *==========================================================================*/
isSupportChannelNeeded(camera3_stream_configuration_t * streamList,cam_stream_size_info_t stream_config_info)1524 bool QCamera3HardwareInterface::isSupportChannelNeeded(
1525         camera3_stream_configuration_t *streamList,
1526         cam_stream_size_info_t stream_config_info)
1527 {
1528     uint32_t i;
1529     bool pprocRequested = false;
1530     /* Check for conditions where PProc pipeline does not have any streams*/
1531     for (i = 0; i < stream_config_info.num_streams; i++) {
1532         if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1533                 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1534             pprocRequested = true;
1535             break;
1536         }
1537     }
1538 
1539     if (pprocRequested == false )
1540         return true;
1541 
1542     /* Dummy stream needed if only raw or jpeg streams present */
1543     for (i = 0; i < streamList->num_streams; i++) {
1544         switch(streamList->streams[i]->format) {
1545             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1546             case HAL_PIXEL_FORMAT_RAW10:
1547             case HAL_PIXEL_FORMAT_RAW16:
1548             case HAL_PIXEL_FORMAT_BLOB:
1549                 break;
1550             default:
1551                 return false;
1552         }
1553     }
1554     return true;
1555 }
1556 
1557 /*==============================================================================
 * FUNCTION   : getSensorModeInfo
 *
 * DESCRIPTION: Get sensor mode information based on current stream configuration
1561  *
1562  * PARAMETERS :
1563  *   @sensor_mode_info : sensor mode information (output)
1564  *
1565  * RETURN     : int32_t type of status
1566  *              NO_ERROR  -- success
1567  *              none-zero failure code
1568  *
1569  *==========================================================================*/
getSensorModeInfo(cam_sensor_mode_info_t & sensorModeInfo)1570 int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1571 {
1572     int32_t rc = NO_ERROR;
1573 
1574     cam_dimension_t max_dim = {0, 0};
1575     for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1576         if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1577             max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1578         if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1579             max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1580     }
1581 
1582     clear_metadata_buffer(mParameters);
1583 
1584     rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1585             max_dim);
1586     if (rc != NO_ERROR) {
1587         LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1588         return rc;
1589     }
1590 
1591     rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1592     if (rc != NO_ERROR) {
1593         LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1594         return rc;
1595     }
1596 
1597     clear_metadata_buffer(mParameters);
1598     ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
1599 
1600     rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1601             mParameters);
1602     if (rc != NO_ERROR) {
1603         LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
1604         return rc;
1605     }
1606 
1607     READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
1608     LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1609             "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1610             sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1611             sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1612             sensorModeInfo.num_raw_bits);
1613 
1614     return rc;
1615 }
1616 
1617 /*==============================================================================
1618  * FUNCTION   : getCurrentSensorModeInfo
1619  *
1620  * DESCRIPTION: Get sensor mode information that is currently selected.
1621  *
1622  * PARAMETERS :
1623  *   @sensorModeInfo : sensor mode information (output)
1624  *
1625  * RETURN     : int32_t type of status
1626  *              NO_ERROR  -- success
1627  *              none-zero failure code
1628  *
1629  *==========================================================================*/
getCurrentSensorModeInfo(cam_sensor_mode_info_t & sensorModeInfo)1630 int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1631 {
1632     int32_t rc = NO_ERROR;
1633 
1634     metadata_buffer_t *cachedParameters = (metadata_buffer_t *) malloc(sizeof(metadata_buffer_t));
1635     if (nullptr == cachedParameters) {
1636         return NO_MEMORY;
1637     }
1638 
1639     memcpy(cachedParameters, mParameters, sizeof(metadata_buffer_t));
1640 
1641     clear_metadata_buffer(mParameters);
1642     ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1643 
1644     rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1645             mParameters);
1646     if (rc != NO_ERROR) {
1647         LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
1648         free(cachedParameters);
1649         return rc;
1650     }
1651 
1652     READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1653     LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1654             "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1655             sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1656             sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1657             sensorModeInfo.num_raw_bits);
1658 
1659     memcpy(mParameters, cachedParameters, sizeof(metadata_buffer_t));
1660     free(cachedParameters);
1661 
1662     return rc;
1663 }
1664 
1665 /*==============================================================================
1666  * FUNCTION   : addToPPFeatureMask
1667  *
1668  * DESCRIPTION: add additional features to pp feature mask based on
1669  *              stream type and usecase
1670  *
1671  * PARAMETERS :
1672  *   @stream_format : stream type for feature mask
1673  *   @stream_idx : stream idx within postprocess_mask list to change
1674  *
1675  * RETURN     : NULL
1676  *
1677  *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property. Accepts either a hex value with a "0x"
     * prefix or a plain decimal value. On _LE_CAMERA_ builds the default mask
     * enables SW TNR; otherwise the default is 0 (no extra features). */
#ifdef _LE_CAMERA_
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    /* Hex form requires at least "0x" plus one digit, hence len > 2. */
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    /* Malformed property: leave the stream's postprocess mask untouched. */
    if (1 != args_converted) {
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add SW TNR (preferred) or LLVD SeeMore to the pp feature mask, but
         * only if video hint is enabled; the two are mutually exclusive. */
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        /* Staggered video HDR is added whenever the sensor advertises it,
         * independent of the property-driven mask above. */
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        /* Binning correction: video use case + capability support required. */
        if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QTI_FEATURE_BINNING_CORRECTION)) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |=
                    CAM_QTI_FEATURE_BINNING_CORRECTION;
        }
        break;
    }
    default:
        /* Other stream formats get no additional pp features. */
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
1737 
1738 /*==============================================================================
1739  * FUNCTION   : updateFpsInPreviewBuffer
1740  *
1741  * DESCRIPTION: update FPS information in preview buffer.
1742  *
1743  * PARAMETERS :
1744  *   @metadata    : pointer to metadata buffer
1745  *   @frame_number: frame_number to look for in pending buffer list
1746  *
1747  * RETURN     : None
1748  *
1749  *==========================================================================*/
updateFpsInPreviewBuffer(metadata_buffer_t * metadata,uint32_t frame_number)1750 void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1751         uint32_t frame_number)
1752 {
1753     // Mark all pending buffers for this particular request
1754     // with corresponding framerate information
1755     for (List<PendingBuffersInRequest>::iterator req =
1756             mPendingBuffersMap.mPendingBuffersInRequest.begin();
1757             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1758         for(List<PendingBufferInfo>::iterator j =
1759                 req->mPendingBufferList.begin();
1760                 j != req->mPendingBufferList.end(); j++) {
1761             QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1762             if ((req->frame_number == frame_number) &&
1763                 (channel->getStreamTypeMask() &
1764                 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1765                 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1766                     CAM_INTF_PARM_FPS_RANGE, metadata) {
1767                     typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1768                     struct private_handle_t *priv_handle =
1769                         (struct private_handle_t *)(*(j->buffer));
1770                     setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1771                 }
1772             }
1773         }
1774     }
1775 }
1776 
1777 /*==============================================================================
1778  * FUNCTION   : updateTimeStampInPendingBuffers
1779  *
1780  * DESCRIPTION: update timestamp in display metadata for all pending buffers
1781  *              of a frame number
1782  *
1783  * PARAMETERS :
1784  *   @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1785  *   @timestamp   : timestamp to be set
1786  *
1787  * RETURN     : None
1788  *
1789  *==========================================================================*/
updateTimeStampInPendingBuffers(uint32_t frameNumber,nsecs_t timestamp)1790 void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1791         uint32_t frameNumber, nsecs_t timestamp)
1792 {
1793     for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1794             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1795         // WAR: save the av_timestamp to the next frame
1796         if(req->frame_number == frameNumber + 1) {
1797             req->av_timestamp = timestamp;
1798         }
1799 
1800         if (req->frame_number != frameNumber)
1801             continue;
1802 
1803         for (auto k = req->mPendingBufferList.begin();
1804                 k != req->mPendingBufferList.end(); k++ ) {
1805             // WAR: update timestamp when it's not VT usecase
1806             QCamera3Channel *channel = (QCamera3Channel *)k->stream->priv;
1807             if (!((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
1808                 m_bAVTimerEnabled)) {
1809                     struct private_handle_t *priv_handle =
1810                         (struct private_handle_t *) (*(k->buffer));
1811                     setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1812             }
1813         }
1814     }
1815     return;
1816 }
1817 
1818 /*===========================================================================
1819  * FUNCTION   : configureStreams
1820  *
1821  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1822  *              and output streams.
1823  *
1824  * PARAMETERS :
1825  *   @stream_list : streams to be configured
1826  *
1827  * RETURN     :
1828  *
1829  *==========================================================================*/
configureStreams(camera3_stream_configuration_t * streamList)1830 int QCamera3HardwareInterface::configureStreams(
1831         camera3_stream_configuration_t *streamList)
1832 {
1833     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
1834     int rc = 0;
1835 
1836     // Acquire perfLock before configure streams
1837     mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
1838     rc = configureStreamsPerfLocked(streamList);
1839     mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
1840 
1841     return rc;
1842 }
1843 
1844 /*===========================================================================
1845  * FUNCTION   : validateStreamCombination
1846  *
1847  * DESCRIPTION: Validate a given stream combination.
1848  *
1849  * PARAMETERS :
1850  *   @cameraId : camera Id.
1851  *   @stream_list : stream combination to be validated.
1852  *   @status : validation status.
1853  *
1854  * RETURN     : int32_t type of status
1855  *              NO_ERROR  -- success
1856  *              none-zero failure code
1857  *==========================================================================*/
int32_t QCamera3HardwareInterface::validateStreamCombination(uint32_t cameraId,
        camera3_stream_configuration_t *streamList /*in*/, StreamValidateStatus *status /*out*/)
{
    bool isJpeg = false;
    bool bJpegExceeds4K = false;
    bool bJpegOnEncoder = false;
    uint32_t width_ratio;
    uint32_t height_ratio;
    size_t rawStreamCnt = 0;
    size_t stallStreamCnt = 0;
    size_t processedStreamCnt = 0;
    size_t pdStatCount = 0;
    size_t numYuv888OnEncoder = 0;
    cam_dimension_t jpegSize = {0, 0};
    camera3_stream_t *zslStream = nullptr;
    uint32_t maxEisWidth = 0;
    uint32_t maxEisHeight = 0;

    if (status == nullptr) {
        LOGE("NULL stream status");
        return BAD_VALUE;
    }

    // Sanity check stream_list
    if (streamList == NULL) {
        LOGE("NULL stream configuration");
        return BAD_VALUE;
    }
    if (streamList->streams == NULL) {
        LOGE("NULL stream list");
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        LOGE("Bad number of streams requested: %d",
                streamList->num_streams);
        return BAD_VALUE;
    }

    if (streamList->num_streams >= MAX_NUM_STREAMS) {
        LOGE("Maximum number of streams %d exceeded: %d",
                MAX_NUM_STREAMS, streamList->num_streams);
        return BAD_VALUE;
    }

    // Per-stream usage-flag, dimension and rotation validation is delegated
    // to the dedicated helpers; fail fast on the first error.
    auto rc = validateUsageFlags(cameraId, streamList);
    if (rc != NO_ERROR) {
        return rc;
    }

    rc = validateStreamDimensions(cameraId, streamList);
    if (rc == NO_ERROR) {
        rc = validateStreamRotations(streamList);
    }
    if (rc != NO_ERROR) {
        LOGE("Invalid stream configuration requested!");
        return rc;
    }

    // EIS is considered supported when the capability list advertises
    // either EIS 2.0 or EIS 3.0.
    size_t count = IS_TYPE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
    for (size_t i = 0; i < count; i++) {
        if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
            (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
            status->bEisSupported = true;
            break;
        }
    }

    if (status->bEisSupported) {
        maxEisWidth = MAX_EIS_WIDTH;
        maxEisHeight = MAX_EIS_HEIGHT;
    }

    status->maxViewfinderSize = gCamCapability[cameraId]->max_viewfinder_size;
    status->largeYuv888Size = {0, 0};
    /* stream configurations: single pass that classifies each stream and
     * accumulates the counts/flags checked after the loop */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        LOGI("stream[%d] type = %d, format = %d, width = %d, "
                "height = %d, rotation = %d, usage = 0x%x",
                 i, newStream->stream_type, newStream->format,
                newStream->width, newStream->height, newStream->rotation,
                newStream->usage);
        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
            status->isZsl = true;
            status->inputStream = newStream;
        }

        // At most one ZSL (input/reprocess) stream is allowed.
        if (IS_USAGE_ZSL(newStream->usage)) {
            if (zslStream != nullptr) {
                LOGE("Multiple input/reprocess streams requested!");
                return BAD_VALUE;
            }
            zslStream = newStream;
        }

        // A BLOB stream that is not a depth stream is a JPEG stream.
        if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
                (newStream->data_space != HAL_DATASPACE_DEPTH)) {
            isJpeg = true;
            jpegSize.width = newStream->width;
            jpegSize.height = newStream->height;
            if (newStream->width > VIDEO_4K_WIDTH ||
                    newStream->height > VIDEO_4K_HEIGHT)
                bJpegExceeds4K = true;
        }

        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
                (IS_USAGE_PREVIEW(newStream->usage) || IS_USAGE_VIDEO(newStream->usage))) {
            if (IS_USAGE_VIDEO(newStream->usage)) {
                status->bIsVideo = true;
                // In HAL3 we can have multiple different video streams.
                // The variables video width and height are used below as
                // dimensions of the biggest of them
                if (status->videoWidth < newStream->width ||
                        status->videoHeight < newStream->height) {
                    status->videoWidth = newStream->width;
                    status->videoHeight = newStream->height;
                }
                if ((VIDEO_4K_WIDTH <= newStream->width) &&
                        (VIDEO_4K_HEIGHT <= newStream->height)) {
                    status->bIs4KVideo = true;
                }
            }
            // EIS size support must hold for ALL preview/video streams,
            // hence the &= accumulation.
            status->bEisSupportedSize &= (newStream->width <= maxEisWidth) &&
                                  (newStream->height <= maxEisHeight);
        }
        if (newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
            switch (newStream->format) {
            case HAL_PIXEL_FORMAT_BLOB:
                if (newStream->data_space == HAL_DATASPACE_DEPTH) {
                    status->depthPresent = true;
                    break;
                }
                stallStreamCnt++;
                if (isOnEncoder(status->maxViewfinderSize, newStream->width,
                        newStream->height)) {
                    status->numStreamsOnEncoder++;
                    bJpegOnEncoder = true;
                }
                // Flag JPEG sizes that exceed the supported downscale factor
                // relative to the active array.
                width_ratio = CEIL_DIVISION(gCamCapability[cameraId]->active_array_size.width,
                        newStream->width);
                height_ratio = CEIL_DIVISION(gCamCapability[cameraId]->active_array_size.height,
                        newStream->height);;
                FATAL_IF(gCamCapability[cameraId]->max_downscale_factor == 0,
                        "FATAL: max_downscale_factor cannot be zero and so assert");
                if ( (width_ratio > gCamCapability[cameraId]->max_downscale_factor) ||
                    (height_ratio > gCamCapability[cameraId]->max_downscale_factor)) {
                    LOGH("Setting small jpeg size flag to true");
                    status->bSmallJpegSize = true;
                }
                break;
            case HAL_PIXEL_FORMAT_RAW10:
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW16:
                rawStreamCnt++;
                // RAW16 with a DEPTH dataspace is a PD-stat stream.
                if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                        (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
                    pdStatCount++;
                }
                break;
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                processedStreamCnt++;
                if (isOnEncoder(status->maxViewfinderSize, newStream->width,
                        newStream->height)) {
                    if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
                            !IS_USAGE_ZSL(newStream->usage)) {
                        status->commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                    }
                    status->numStreamsOnEncoder++;
                }
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
            case HAL_PIXEL_FORMAT_Y8:
                processedStreamCnt++;
                if (isOnEncoder(status->maxViewfinderSize, newStream->width,
                        newStream->height)) {
                    // If Yuv888/Y8 size is not greater than 4K, set feature mask
                    // to SUPERSET so that it support concurrent request on
                    // YUV and JPEG.
                    if (newStream->width <= VIDEO_4K_WIDTH &&
                            newStream->height <= VIDEO_4K_HEIGHT) {
                        status->commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                    }
                    if (newStream->format == HAL_PIXEL_FORMAT_Y8) {
                        status->bY80OnEncoder = true;
                    }
                    status->numStreamsOnEncoder++;
                    numYuv888OnEncoder++;
                    status->largeYuv888Size.width = newStream->width;
                    status->largeYuv888Size.height = newStream->height;
                }
                break;
            default:
                LOGE("not a supported format 0x%x", newStream->format);
                return BAD_VALUE;
            }
        }
    }

    if (validateUsageFlagsForEis(status->bEisSupported, status->bEisSupportedSize, streamList) !=
            NO_ERROR) {
        return BAD_VALUE;
    }

    /* Check if num_streams is sane */
    if (stallStreamCnt > MAX_STALLING_STREAMS ||
            rawStreamCnt > MAX_RAW_STREAMS ||
            processedStreamCnt > MAX_PROCESSED_STREAMS) {
        LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
                 stallStreamCnt, rawStreamCnt, processedStreamCnt);
        return BAD_VALUE;
    }
    /* Check whether we have zsl stream or 4k video case */
    if (status->isZsl && status->bIs4KVideo) {
        LOGE("Currently invalid configuration ZSL & 4K Video!");
        return BAD_VALUE;
    }
    /* Check if stream sizes are sane */
    if (status->numStreamsOnEncoder > 2) {
        LOGE("Number of streams on ISP encoder path exceeds limits of 2");
        return BAD_VALUE;
    } else if (1 < status->numStreamsOnEncoder){
        status->bUseCommonFeatureMask = true;
        LOGH("Multiple streams above max viewfinder size, common mask needed");
    }

    /* Check if BLOB size is greater than 4k in 4k recording case */
    if (status->bIs4KVideo && bJpegExceeds4K) {
        LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
        return BAD_VALUE;
    }

    if ((streamList->operation_mode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
            status->depthPresent) {
        LOGE("HAL doesn't support depth streams in HFR mode!");
        return BAD_VALUE;
    }

    // When JPEG and preview streams share VFE output, CPP will not apply CAC2
    // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
    // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
    // is not true. Otherwise testMandatoryOutputCombinations will fail with following
    // configurations:
    //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
    //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
    //    (These two configurations will not have CAC2 enabled even in HQ modes.)
    if (!status->isZsl && bJpegOnEncoder && bJpegExceeds4K && status->bUseCommonFeatureMask) {
        ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
                __func__);
        return BAD_VALUE;
    }

    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
    // the YUV stream's size is greater or equal to the JPEG size, set common
    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
    // NOTE(review): the comparisons below use strict '>' while the comment above
    // says "greater or equal" — confirm which is intended before changing either.
    if (numYuv888OnEncoder && isOnEncoder(status->maxViewfinderSize,
            jpegSize.width, jpegSize.height) &&
            status->largeYuv888Size.width > jpegSize.width &&
            status->largeYuv888Size.height > jpegSize.height) {
        status->bYuv888OverrideJpeg = true;
    } else if (!isJpeg && status->numStreamsOnEncoder > 1) {
        status->commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    }

    LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
            status->maxViewfinderSize.width, status->maxViewfinderSize.height, status->isZsl,
            status->bUseCommonFeatureMask, status->commonFeatureMask);
    LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
            status->numStreamsOnEncoder, processedStreamCnt, stallStreamCnt,
            status->bSmallJpegSize);

    // At most one PD-stat stream, and none in HFR mode.
    if (1 < pdStatCount) {
        LOGE("HAL doesn't support multiple PD streams");
        return BAD_VALUE;
    }

    if ((streamList->operation_mode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
            (1 == pdStatCount)) {
        LOGE("HAL doesn't support PD streams in HFR mode!");
        return -EINVAL;
    }

    return NO_ERROR;
}
2143 
2144 /*===========================================================================
2145  * FUNCTION   : configureStreamsPerfLocked
2146  *
2147  * DESCRIPTION: configureStreams while perfLock is held.
2148  *
2149  * PARAMETERS :
2150  *   @stream_list : streams to be configured
2151  *
2152  * RETURN     : int32_t type of status
2153  *              NO_ERROR  -- success
2154  *              none-zero failure code
2155  *==========================================================================*/
configureStreamsPerfLocked(camera3_stream_configuration_t * streamList)2156 int QCamera3HardwareInterface::configureStreamsPerfLocked(
2157         camera3_stream_configuration_t *streamList)
2158 {
2159     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
2160 
2161     StreamValidateStatus streamStatus;
2162     auto rc = validateStreamCombination(mCameraId, streamList, &streamStatus);
2163     if (NO_ERROR != rc) {
2164         return rc;
2165     }
2166 
2167     mOpMode = streamList->operation_mode;
2168     LOGD("mOpMode: %d", mOpMode);
2169 
2170     // Disable HDR+ if it's enabled;
2171     {
2172         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
2173         finishHdrPlusClientOpeningLocked(l);
2174         disableHdrPlusModeLocked();
2175     }
2176 
2177     /* first invalidate all the streams in the mStreamList
2178      * if they appear again, they will be validated */
2179     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2180             it != mStreamInfo.end(); it++) {
2181         QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
2182         if (channel) {
2183           channel->stop();
2184         }
2185         (*it)->status = INVALID;
2186     }
2187 
2188     if (mRawDumpChannel) {
2189         mRawDumpChannel->stop();
2190         delete mRawDumpChannel;
2191         mRawDumpChannel = NULL;
2192     }
2193 
2194     if (mHdrPlusRawSrcChannel) {
2195         mHdrPlusRawSrcChannel->stop();
2196         delete mHdrPlusRawSrcChannel;
2197         mHdrPlusRawSrcChannel = NULL;
2198     }
2199 
2200     if (mSupportChannel)
2201         mSupportChannel->stop();
2202 
2203     if (mAnalysisChannel) {
2204         mAnalysisChannel->stop();
2205     }
2206     if (mMetadataChannel) {
2207         /* If content of mStreamInfo is not 0, there is metadata stream */
2208         mMetadataChannel->stop();
2209     }
2210     if (mChannelHandle) {
2211         stopChannelLocked(/*stop_immediately*/false);
2212     }
2213 
2214     pthread_mutex_lock(&mMutex);
2215 
2216     mPictureChannel = NULL;
2217 
2218     // Check state
2219     switch (mState) {
2220         case INITIALIZED:
2221         case CONFIGURED:
2222         case STARTED:
2223             /* valid state */
2224             break;
2225         default:
2226             LOGE("Invalid state %d", mState);
2227             pthread_mutex_unlock(&mMutex);
2228             return -ENODEV;
2229     }
2230 
2231     /* Check whether we have video stream */
2232     m_bIs4KVideo = streamStatus.bIs4KVideo;
2233     m_bIsVideo = streamStatus.bIsVideo;
2234     m_bEisSupported = streamStatus.bEisSupported;
2235     m_bEisSupportedSize = streamStatus.bEisSupportedSize;
2236     m_bTnrEnabled = false;
2237     m_bVideoHdrEnabled = false;
2238     cam_dimension_t previewSize = {0, 0};
2239 
2240     cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
2241 
2242     /*EIS configuration*/
2243     uint8_t eis_prop_set;
2244 
2245     // Initialize all instant AEC related variables
2246     mInstantAEC = false;
2247     mResetInstantAEC = false;
2248     mInstantAECSettledFrameNumber = 0;
2249     mAecSkipDisplayFrameBound = 0;
2250     mInstantAecFrameIdxCount = 0;
2251     mCurrFeatureState = 0;
2252     mStreamConfig = true;
2253 
2254     m_bAVTimerEnabled = false;
2255 
2256     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
2257 
2258     /* EIS setprop control */
2259     char eis_prop[PROPERTY_VALUE_MAX];
2260     memset(eis_prop, 0, sizeof(eis_prop));
2261     property_get("persist.camera.eis.enable", eis_prop, "1");
2262     eis_prop_set = (uint8_t)atoi(eis_prop);
2263 
2264     m_bEisEnable = eis_prop_set && m_bEisSupported &&
2265             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2266             (gCamCapability[mCameraId]->position == CAM_POSITION_BACK ||
2267              gCamCapability[mCameraId]->position == CAM_POSITION_BACK_AUX);
2268 
2269     LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
2270             m_bEisEnable, eis_prop_set, m_bEisSupported);
2271 
2272     uint8_t forceEnableTnr = 0;
2273     char tnr_prop[PROPERTY_VALUE_MAX];
2274     memset(tnr_prop, 0, sizeof(tnr_prop));
2275     property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2276     forceEnableTnr = (uint8_t)atoi(tnr_prop);
2277 
2278     /* Logic to enable/disable TNR based on specific config size/etc.*/
2279     if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
2280             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2281         m_bTnrEnabled = true;
2282     else if (forceEnableTnr)
2283         m_bTnrEnabled = true;
2284 
2285     char videoHdrProp[PROPERTY_VALUE_MAX];
2286     memset(videoHdrProp, 0, sizeof(videoHdrProp));
2287     property_get("persist.camera.hdr.video", videoHdrProp, "0");
2288     uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2289 
2290     if (hdr_mode_prop == 1 && m_bIsVideo &&
2291             mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2292         m_bVideoHdrEnabled = true;
2293     else
2294         m_bVideoHdrEnabled = false;
2295 
2296     camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2297     for (size_t i = 0; i < streamList->num_streams; i++) {
2298         camera3_stream_t *newStream = streamList->streams[i];
2299         LOGH("newStream type = %d, stream format = %d "
2300                 "stream size : %d x %d, stream rotation = %d",
2301                  newStream->stream_type, newStream->format,
2302                 newStream->width, newStream->height, newStream->rotation);
2303         //if the stream is in the mStreamList validate it
2304         bool stream_exists = false;
2305         for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2306                 it != mStreamInfo.end(); it++) {
2307             if ((*it)->stream == newStream) {
2308                 QCamera3ProcessingChannel *channel =
2309                     (QCamera3ProcessingChannel*)(*it)->stream->priv;
2310                 stream_exists = true;
2311                 if (channel)
2312                     delete channel;
2313                 (*it)->status = VALID;
2314                 (*it)->stream->priv = NULL;
2315                 (*it)->channel = NULL;
2316             }
2317         }
2318         if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2319             //new stream
2320             stream_info_t* stream_info;
2321             stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2322             if (!stream_info) {
2323                LOGE("Could not allocate stream info");
2324                rc = -ENOMEM;
2325                pthread_mutex_unlock(&mMutex);
2326                return rc;
2327             }
2328             stream_info->stream = newStream;
2329             stream_info->status = VALID;
2330             stream_info->channel = NULL;
2331             stream_info->id = i; // ID will be re-assigned in cleanAndSortStreamInfo().
2332             mStreamInfo.push_back(stream_info);
2333         }
2334         /* Covers Opaque ZSL and API1 F/W ZSL */
2335         if (IS_USAGE_ZSL(newStream->usage)
2336                 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2337             if (zslStream != NULL) {
2338                 LOGE("Multiple input/reprocess streams requested!");
2339                 pthread_mutex_unlock(&mMutex);
2340                 return BAD_VALUE;
2341             }
2342             zslStream = newStream;
2343         }
2344         /* Covers YUV reprocess */
2345         if (streamStatus.inputStream != NULL) {
2346             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2347                     && ((newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2348                          && streamStatus.inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888)
2349                         || (newStream->format == HAL_PIXEL_FORMAT_Y8
2350                          && streamStatus.inputStream->format == HAL_PIXEL_FORMAT_Y8))
2351                     && streamStatus.inputStream->width == newStream->width
2352                     && streamStatus.inputStream->height == newStream->height) {
2353                 if (zslStream != NULL) {
2354                     /* This scenario indicates multiple YUV streams with same size
2355                      * as input stream have been requested, since zsl stream handle
2356                      * is solely used for the purpose of overriding the size of streams
2357                      * which share h/w streams we will just make a guess here as to
2358                      * which of the stream is a ZSL stream, this will be refactored
2359                      * once we make generic logic for streams sharing encoder output
2360                      */
2361                     LOGH("Warning, Multiple ip/reprocess streams requested!");
2362                 }
2363                 zslStream = newStream;
2364             }
2365         }
2366     }
2367 
2368     /* If a zsl stream is set, we know that we have configured at least one input or
2369        bidirectional stream */
2370     if (NULL != zslStream) {
2371         mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2372         mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2373         mInputStreamInfo.format = zslStream->format;
2374         mInputStreamInfo.usage = zslStream->usage;
2375         LOGD("Input stream configured! %d x %d, format %d, usage %d",
2376                  mInputStreamInfo.dim.width,
2377                 mInputStreamInfo.dim.height,
2378                 mInputStreamInfo.format, mInputStreamInfo.usage);
2379     }
2380 
2381     cleanAndSortStreamInfo();
2382     if (mMetadataChannel) {
2383         delete mMetadataChannel;
2384         mMetadataChannel = NULL;
2385     }
2386     if (mSupportChannel) {
2387         delete mSupportChannel;
2388         mSupportChannel = NULL;
2389     }
2390 
2391     if (mAnalysisChannel) {
2392         delete mAnalysisChannel;
2393         mAnalysisChannel = NULL;
2394     }
2395 
2396     if (mDummyBatchChannel) {
2397         delete mDummyBatchChannel;
2398         mDummyBatchChannel = NULL;
2399     }
2400 
2401     if (mDepthChannel) {
2402         mDepthChannel = NULL;
2403     }
2404     mDepthCloudMode = CAM_PD_DATA_SKIP;
2405 
2406     mShutterDispatcher.clear();
2407     mOutputBufferDispatcher.clear();
2408 
2409     char is_type_value[PROPERTY_VALUE_MAX];
2410     property_get("persist.camera.is_type", is_type_value, "4");
2411     m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2412 
2413     char property_value[PROPERTY_VALUE_MAX];
2414     property_get("persist.camera.gzoom.at", property_value, "0");
2415     int goog_zoom_at = atoi(property_value);
2416     bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2417         gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2418     bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2419         gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2420 
2421     property_get("persist.camera.gzoom.4k", property_value, "0");
2422     bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2423 
2424     //Create metadata channel and initialize it
2425     cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2426     setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2427             gCamCapability[mCameraId]->color_arrangement);
2428     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2429                     mChannelHandle, mCameraHandle->ops, captureResultCb,
2430                     setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
2431     if (mMetadataChannel == NULL) {
2432         LOGE("failed to allocate metadata channel");
2433         rc = -ENOMEM;
2434         pthread_mutex_unlock(&mMutex);
2435         return rc;
2436     }
2437     mMetadataChannel->enableDepthData(streamStatus.depthPresent);
2438     rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2439     if (rc < 0) {
2440         LOGE("metadata channel initialization failed");
2441         delete mMetadataChannel;
2442         mMetadataChannel = NULL;
2443         pthread_mutex_unlock(&mMutex);
2444         return rc;
2445     }
2446 
2447     cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2448     bool isRawStreamRequested = false;
2449     bool onlyRaw = true;
2450     // Keep track of preview/video streams indices.
2451     // There could be more than one preview streams, but only one video stream.
2452     int32_t video_stream_idx = -1;
2453     int32_t preview_stream_idx[streamList->num_streams];
2454     size_t preview_stream_cnt = 0;
2455     bool previewTnr[streamList->num_streams];
2456     memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2457     bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2458     // Loop through once to determine preview TNR conditions before creating channels.
2459     for (size_t i = 0; i < streamList->num_streams; i++) {
2460         camera3_stream_t *newStream = streamList->streams[i];
2461         uint32_t stream_usage = newStream->usage;
2462         if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2463                 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2464             if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2465                 video_stream_idx = (int32_t)i;
2466             else
2467                 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2468         }
2469     }
2470     // By default, preview stream TNR is disabled.
2471     // Enable TNR to the preview stream if all conditions below are satisfied:
2472     //  1. preview resolution == video resolution.
2473     //  2. video stream TNR is enabled.
2474     //  3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2475     for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2476         camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2477         camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2478         if (m_bTnrEnabled && m_bTnrVideo &&
2479                 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2480                 video_stream->width == preview_stream->width &&
2481                 video_stream->height == preview_stream->height) {
2482             previewTnr[preview_stream_idx[i]] = true;
2483         }
2484     }
2485 
2486     memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2487     /* Allocate channel objects for the requested streams */
2488     for (size_t i = 0; i < streamList->num_streams; i++) {
2489 
2490         camera3_stream_t *newStream = streamList->streams[i];
2491         uint32_t stream_usage = newStream->usage;
2492         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2493         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2494         struct camera_info *p_info = NULL;
2495         pthread_mutex_lock(&gCamLock);
2496         p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2497         pthread_mutex_unlock(&gCamLock);
2498         if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2499                 || IS_USAGE_ZSL(newStream->usage)) &&
2500             newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
2501             onlyRaw = false; // There is non-raw stream - bypass flag if set
2502             mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2503             if (isOnEncoder(streamStatus.maxViewfinderSize, newStream->width, newStream->height)) {
2504                 if (streamStatus.bUseCommonFeatureMask)
2505                     zsl_ppmask = streamStatus.commonFeatureMask;
2506                 else
2507                     zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2508             } else {
2509                 if (streamStatus.numStreamsOnEncoder > 0)
2510                     zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2511                 else
2512                     zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2513             }
2514             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
2515         } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
2516             onlyRaw = false; // There is non-raw stream - bypass flag if set
2517                 LOGH("Input stream configured, reprocess config");
2518         } else {
2519             //for non zsl streams find out the format
2520             switch (newStream->format) {
2521             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2522             {
2523                 onlyRaw = false; // There is non-raw stream - bypass flag if set
2524                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2525                         CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2526                 /* add additional features to pp feature mask */
2527                 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2528                         mStreamConfigInfo.num_streams);
2529 
2530                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2531                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2532                                 CAM_STREAM_TYPE_VIDEO;
2533                     if (m_bTnrEnabled && m_bTnrVideo) {
2534                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2535                             CAM_QCOM_FEATURE_CPP_TNR;
2536                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2537                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2538                                 ~CAM_QCOM_FEATURE_CDS;
2539                     }
2540                     if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2541                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2542                             CAM_QTI_FEATURE_PPEISCORE;
2543                     }
2544                     if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2545                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2546                             CAM_QCOM_FEATURE_GOOG_ZOOM;
2547                     }
2548                 } else {
2549                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2550                             CAM_STREAM_TYPE_PREVIEW;
2551                     if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
2552                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2553                                 CAM_QCOM_FEATURE_CPP_TNR;
2554                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2555                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2556                                 ~CAM_QCOM_FEATURE_CDS;
2557                     }
2558                     if(!m_bSwTnrPreview) {
2559                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2560                                 ~CAM_QTI_FEATURE_SW_TNR;
2561                     }
2562                     if (is_goog_zoom_preview_enabled) {
2563                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2564                             CAM_QCOM_FEATURE_GOOG_ZOOM;
2565                     }
2566                     padding_info.width_padding = mSurfaceStridePadding;
2567                     padding_info.height_padding = CAM_PAD_TO_2;
2568                     previewSize.width = (int32_t)newStream->width;
2569                     previewSize.height = (int32_t)newStream->height;
2570                 }
2571                 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2572                         (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2573                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2574                             newStream->height;
2575                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2576                             newStream->width;
2577                 }
2578             }
2579             break;
2580             case HAL_PIXEL_FORMAT_YCbCr_420_888:
2581             case HAL_PIXEL_FORMAT_Y8:
2582                 onlyRaw = false; // There is non-raw stream - bypass flag if set
2583                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2584                 if (isOnEncoder(streamStatus.maxViewfinderSize, newStream->width,
2585                             newStream->height)) {
2586                     if (streamStatus.bUseCommonFeatureMask)
2587                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2588                                 streamStatus.commonFeatureMask;
2589                     else
2590                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2591                                 CAM_QCOM_FEATURE_NONE;
2592                 } else {
2593                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2594                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2595                 }
2596             break;
2597             case HAL_PIXEL_FORMAT_BLOB:
2598                 onlyRaw = false; // There is non-raw stream - bypass flag if set
2599                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2600                 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2601                 if ((m_bIs4KVideo && !streamStatus.isZsl) ||
2602                         (streamStatus.bSmallJpegSize && !streamStatus.isZsl)) {
2603                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2604                              CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2605                      /* Remove rotation if it is not supported
2606                         for 4K LiveVideo snapshot case (online processing) */
2607                      if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2608                                 CAM_QCOM_FEATURE_ROTATION)) {
2609                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2610                                  &= ~CAM_QCOM_FEATURE_ROTATION;
2611                      }
2612                 } else {
2613                     if (streamStatus.bUseCommonFeatureMask &&
2614                             isOnEncoder(streamStatus.maxViewfinderSize, newStream->width,
2615                             newStream->height)) {
2616                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2617                                 streamStatus.commonFeatureMask;
2618                     } else {
2619                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2620                     }
2621                 }
2622                 if (streamStatus.isZsl) {
2623                     if (zslStream) {
2624                         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2625                                 (int32_t)zslStream->width;
2626                         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2627                                 (int32_t)zslStream->height;
2628                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2629                                 zsl_ppmask;
2630                     } else {
2631                         LOGE("Error, No ZSL stream identified");
2632                         pthread_mutex_unlock(&mMutex);
2633                         return -EINVAL;
2634                     }
2635                 } else if (m_bIs4KVideo) {
2636                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2637                             (int32_t) streamStatus.videoWidth;
2638                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2639                             (int32_t) streamStatus.videoHeight;
2640                 } else if (streamStatus.bYuv888OverrideJpeg) {
2641                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2642                             (int32_t) streamStatus.largeYuv888Size.width;
2643                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2644                             (int32_t) streamStatus.largeYuv888Size.height;
2645                 }
2646                 break;
2647             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2648             case HAL_PIXEL_FORMAT_RAW16:
2649             case HAL_PIXEL_FORMAT_RAW10:
2650                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2651                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2652                 isRawStreamRequested = true;
2653                 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2654                         (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2655                     mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2656                             gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2657                     mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2658                             gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2659                     mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2660                             gCamCapability[mCameraId]->dt[mPDIndex];
2661                     mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2662                             gCamCapability[mCameraId]->vc[mPDIndex];
2663                 }
2664                 break;
2665             default:
2666                 onlyRaw = false; // There is non-raw stream - bypass flag if set
2667                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2668                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2669                 break;
2670             }
2671         }
2672 
2673         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2674                 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2675                 gCamCapability[mCameraId]->color_arrangement);
2676 
2677         if (newStream->priv == NULL) {
2678             //New stream, construct channel
2679             switch (newStream->stream_type) {
2680             case CAMERA3_STREAM_INPUT:
2681                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2682                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2683                 break;
2684             case CAMERA3_STREAM_BIDIRECTIONAL:
2685                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2686                     GRALLOC_USAGE_HW_CAMERA_WRITE;
2687                 break;
2688             case CAMERA3_STREAM_OUTPUT:
2689                 /* For video encoding stream, set read/write rarely
2690                  * flag so that they may be set to un-cached */
2691                 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2692                     newStream->usage |=
2693                          (GRALLOC_USAGE_SW_READ_RARELY |
2694                          GRALLOC_USAGE_SW_WRITE_RARELY |
2695                          GRALLOC_USAGE_HW_CAMERA_WRITE);
2696                 else if (IS_USAGE_ZSL(newStream->usage))
2697                 {
2698                     LOGD("ZSL usage flag skipping");
2699                 }
2700                 else if (newStream == zslStream
2701                         || (newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
2702                             newStream->format == HAL_PIXEL_FORMAT_Y8)) {
2703                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2704                 } else
2705                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2706                 break;
2707             default:
2708                 LOGE("Invalid stream_type %d", newStream->stream_type);
2709                 break;
2710             }
2711 
2712             bool forcePreviewUBWC = true;
2713             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2714                     newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2715                 QCamera3ProcessingChannel *channel = NULL;
2716                 switch (newStream->format) {
2717                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2718                     if ((newStream->usage &
2719                             private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2720                             (streamList->operation_mode ==
2721                             CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2722                     ) {
2723                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2724                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
2725                                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2726                                 this,
2727                                 newStream,
2728                                 (cam_stream_type_t)
2729                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2730                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2731                                 mMetadataChannel,
2732                                 0); //heap buffers are not required for HFR video channel
2733                         if (channel == NULL) {
2734                             LOGE("allocation of channel failed");
2735                             pthread_mutex_unlock(&mMutex);
2736                             return -ENOMEM;
2737                         }
2738                         //channel->getNumBuffers() will return 0 here so use
2739                         //MAX_INFLIGH_HFR_REQUESTS
2740                         newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2741                         newStream->priv = channel;
2742                         LOGI("num video buffers in HFR mode: %d",
2743                                  MAX_INFLIGHT_HFR_REQUESTS);
2744                     } else {
2745                         /* Copy stream contents in HFR preview only case to create
2746                          * dummy batch channel so that sensor streaming is in
2747                          * HFR mode */
2748                         if (!m_bIsVideo && (streamList->operation_mode ==
2749                                 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2750                             mDummyBatchStream = *newStream;
2751                             mDummyBatchStream.usage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
2752                         }
2753                         int bufferCount = MAX_INFLIGHT_REQUESTS;
2754                         if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2755                                 CAM_STREAM_TYPE_VIDEO) {
2756                             if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2757                                 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2758                                 bufferCount = m_bIs4KVideo ?
2759                                     MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2760                             }
2761 
2762                         }
2763                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2764                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
2765                                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2766                                 this,
2767                                 newStream,
2768                                 (cam_stream_type_t)
2769                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2770                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2771                                 mMetadataChannel,
2772                                 bufferCount);
2773                         if (channel == NULL) {
2774                             LOGE("allocation of channel failed");
2775                             pthread_mutex_unlock(&mMutex);
2776                             return -ENOMEM;
2777                         }
2778                         /* disable UBWC for preview, though supported,
2779                          * to take advantage of CPP duplication */
2780                         if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
2781                                 (previewSize.width == (int32_t) streamStatus.videoWidth)&&
2782                                 (previewSize.height == (int32_t) streamStatus.videoHeight)){
2783                             forcePreviewUBWC = false;
2784                         }
2785                         channel->setUBWCEnabled(forcePreviewUBWC);
2786                          /* When goog_zoom is linked to the preview or video stream,
2787                           * disable ubwc to the linked stream */
2788                         if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2789                                 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2790                             channel->setUBWCEnabled(false);
2791                         }
2792                         newStream->max_buffers = channel->getNumBuffers();
2793                         newStream->priv = channel;
2794                     }
2795                     break;
2796                 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2797                 case HAL_PIXEL_FORMAT_Y8: {
2798                     channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2799                             mChannelHandle,
2800                             mCameraHandle->ops, captureResultCb,
2801                             setBufferErrorStatus, &padding_info,
2802                             this,
2803                             newStream,
2804                             (cam_stream_type_t)
2805                                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2806                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2807                             mMetadataChannel);
2808                     if (channel == NULL) {
2809                         LOGE("allocation of YUV channel failed");
2810                         pthread_mutex_unlock(&mMutex);
2811                         return -ENOMEM;
2812                     }
2813                     newStream->max_buffers = channel->getNumBuffers();
2814                     newStream->priv = channel;
2815                     break;
2816                 }
2817                 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2818                 case HAL_PIXEL_FORMAT_RAW16:
2819                 case HAL_PIXEL_FORMAT_RAW10: {
2820                     bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2821                             (HAL_DATASPACE_DEPTH != newStream->data_space))
2822                             ? true : false;
2823                     mRawChannel = new QCamera3RawChannel(
2824                             mCameraHandle->camera_handle, mChannelHandle,
2825                             mCameraHandle->ops, captureResultCb,
2826                             setBufferErrorStatus, &padding_info,
2827                             this, newStream,
2828                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2829                             mMetadataChannel, isRAW16);
2830                     if (mRawChannel == NULL) {
2831                         LOGE("allocation of raw channel failed");
2832                         pthread_mutex_unlock(&mMutex);
2833                         return -ENOMEM;
2834                     }
2835                     newStream->max_buffers = mRawChannel->getNumBuffers();
2836                     newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2837                     break;
2838                 }
2839                 case HAL_PIXEL_FORMAT_BLOB:
2840                     if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2841                         mDepthChannel = new QCamera3DepthChannel(
2842                                 mCameraHandle->camera_handle, mChannelHandle,
2843                                 mCameraHandle->ops, NULL, NULL, &padding_info,
2844                                 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2845                                 mMetadataChannel);
2846                         if (NULL == mDepthChannel) {
2847                             LOGE("Allocation of depth channel failed");
2848                             pthread_mutex_unlock(&mMutex);
2849                             return NO_MEMORY;
2850                         }
2851                         newStream->priv = mDepthChannel;
2852                         newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2853                     } else {
2854                         // Max live snapshot inflight buffer is 1. This is to mitigate
2855                         // frame drop issues for video snapshot. The more buffers being
2856                         // allocated, the more frame drops there are.
2857                         mPictureChannel = new QCamera3PicChannel(
2858                                 mCameraHandle->camera_handle, mChannelHandle,
2859                                 mCameraHandle->ops, captureResultCb,
2860                                 setBufferErrorStatus, &padding_info, this, newStream,
2861                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2862                                 m_bIs4KVideo, streamStatus.isZsl, streamStatus.bY80OnEncoder,
2863                                 mMetadataChannel, (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2864                         if (mPictureChannel == NULL) {
2865                             LOGE("allocation of channel failed");
2866                             pthread_mutex_unlock(&mMutex);
2867                             return -ENOMEM;
2868                         }
2869                         newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2870                         newStream->max_buffers = mPictureChannel->getNumBuffers();
2871                         mPictureChannel->overrideYuvSize(
2872                                 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2873                                 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2874                     }
2875                     break;
2876 
2877                 default:
2878                     LOGE("not a supported format 0x%x", newStream->format);
2879                     pthread_mutex_unlock(&mMutex);
2880                     return -EINVAL;
2881                 }
2882             } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2883                 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2884             } else {
2885                 LOGE("Error, Unknown stream type");
2886                 pthread_mutex_unlock(&mMutex);
2887                 return -EINVAL;
2888             }
2889 
2890             QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2891             if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2892                 // Here we only care whether it's EIS3 or not
2893                 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2894                 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2895                         mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2896                     isType = IS_TYPE_NONE;
2897                 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
2898                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2899                         newStream->width, newStream->height, forcePreviewUBWC, isType);
2900                 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2901                     newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2902                 }
2903             }
2904 
2905             for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2906                     it != mStreamInfo.end(); it++) {
2907                 if ((*it)->stream == newStream) {
2908                     (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2909                     break;
2910                 }
2911             }
2912         } else {
2913             // Channel already exists for this stream
2914             // Do nothing for now
2915         }
2916         padding_info = gCamCapability[mCameraId]->padding_info;
2917 
2918         /* Do not add entries for input&depth stream in metastream info
2919          * since there is no real stream associated with it
2920          */
2921         if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
2922                 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2923                         (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
2924             mStreamConfigInfo.num_streams++;
2925         }
2926     }
2927 
2928     // Let buffer dispatcher know the configured streams.
2929     mOutputBufferDispatcher.configureStreams(streamList);
2930 
2931     if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2932         onlyRaw = false;
2933     }
2934 
2935     // Create analysis stream all the time, even when h/w support is not available
2936     if (!onlyRaw) {
2937         cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2938         cam_analysis_info_t analysisInfo;
2939         int32_t ret = NO_ERROR;
2940         ret = mCommon.getAnalysisInfo(
2941                 FALSE,
2942                 analysisFeatureMask,
2943                 &analysisInfo);
2944         if (ret == NO_ERROR) {
2945             cam_color_filter_arrangement_t analysis_color_arrangement =
2946                     (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2947                     CAM_FILTER_ARRANGEMENT_Y :
2948                     gCamCapability[mCameraId]->color_arrangement);
2949             setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2950                                                analysis_color_arrangement);
2951             cam_dimension_t analysisDim;
2952             analysisDim = mCommon.getMatchingDimension(previewSize,
2953                     analysisInfo.analysis_recommended_res);
2954 
2955             mAnalysisChannel = new QCamera3SupportChannel(
2956                     mCameraHandle->camera_handle,
2957                     mChannelHandle,
2958                     mCameraHandle->ops,
2959                     &analysisInfo.analysis_padding_info,
2960                     analysisFeatureMask,
2961                     CAM_STREAM_TYPE_ANALYSIS,
2962                     &analysisDim,
2963                     (analysisInfo.analysis_format
2964                     == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2965                     : CAM_FORMAT_YUV_420_NV21),
2966                     analysisInfo.hw_analysis_supported,
2967                     gCamCapability[mCameraId]->color_arrangement,
2968                     this,
2969                     0); // force buffer count to 0
2970         } else {
2971             LOGW("getAnalysisInfo failed, ret = %d", ret);
2972         }
2973         if (!mAnalysisChannel) {
2974             LOGW("Analysis channel cannot be created");
2975         }
2976     }
2977 
2978     //RAW DUMP channel
2979     if (mEnableRawDump && isRawStreamRequested == false){
2980         cam_dimension_t rawDumpSize;
2981         rawDumpSize = getMaxRawSize(mCameraId);
2982         cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2983         setPAAFSupport(rawDumpFeatureMask,
2984                 CAM_STREAM_TYPE_RAW,
2985                 gCamCapability[mCameraId]->color_arrangement);
2986         mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2987                                   mChannelHandle,
2988                                   mCameraHandle->ops,
2989                                   rawDumpSize,
2990                                   &padding_info,
2991                                   this, rawDumpFeatureMask);
2992         if (!mRawDumpChannel) {
2993             LOGE("Raw Dump channel cannot be created");
2994             pthread_mutex_unlock(&mMutex);
2995             return -ENOMEM;
2996         }
2997     }
2998 
2999     if (mAnalysisChannel) {
3000         cam_analysis_info_t analysisInfo;
3001         memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
3002         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3003                 CAM_STREAM_TYPE_ANALYSIS;
3004         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3005                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3006         rc = mCommon.getAnalysisInfo(FALSE,
3007                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3008                 &analysisInfo);
3009         if (rc != NO_ERROR) {
3010             LOGE("getAnalysisInfo failed, ret = %d", rc);
3011             pthread_mutex_unlock(&mMutex);
3012             return rc;
3013         }
3014         cam_color_filter_arrangement_t analysis_color_arrangement =
3015                 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
3016                 CAM_FILTER_ARRANGEMENT_Y :
3017                 gCamCapability[mCameraId]->color_arrangement);
3018         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3019                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3020                 analysis_color_arrangement);
3021 
3022         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
3023                 mCommon.getMatchingDimension(previewSize,
3024                 analysisInfo.analysis_recommended_res);
3025         mStreamConfigInfo.num_streams++;
3026     }
3027 
3028     if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
3029         cam_analysis_info_t supportInfo;
3030         memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
3031         cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3032         setPAAFSupport(callbackFeatureMask,
3033                 CAM_STREAM_TYPE_CALLBACK,
3034                 gCamCapability[mCameraId]->color_arrangement);
3035         int32_t ret = NO_ERROR;
3036         ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
3037         if (ret != NO_ERROR) {
3038             /* Ignore the error for Mono camera
3039              * because the PAAF bit mask is only set
3040              * for CAM_STREAM_TYPE_ANALYSIS stream type
3041              */
3042             if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
3043                 LOGW("getAnalysisInfo failed, ret = %d", ret);
3044             }
3045         }
3046         mSupportChannel = new QCamera3SupportChannel(
3047                 mCameraHandle->camera_handle,
3048                 mChannelHandle,
3049                 mCameraHandle->ops,
3050                 &gCamCapability[mCameraId]->padding_info,
3051                 callbackFeatureMask,
3052                 CAM_STREAM_TYPE_CALLBACK,
3053                 &QCamera3SupportChannel::kDim,
3054                 CAM_FORMAT_YUV_420_NV21,
3055                 supportInfo.hw_analysis_supported,
3056                 gCamCapability[mCameraId]->color_arrangement,
3057                 this, 0);
3058         if (!mSupportChannel) {
3059             LOGE("dummy channel cannot be created");
3060             pthread_mutex_unlock(&mMutex);
3061             return -ENOMEM;
3062         }
3063     }
3064 
3065     if (mSupportChannel) {
3066         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
3067                 QCamera3SupportChannel::kDim;
3068         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3069                 CAM_STREAM_TYPE_CALLBACK;
3070         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3071                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3072         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3073                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3074                 gCamCapability[mCameraId]->color_arrangement);
3075         mStreamConfigInfo.num_streams++;
3076     }
3077 
3078     if (mRawDumpChannel) {
3079         cam_dimension_t rawSize;
3080         rawSize = getMaxRawSize(mCameraId);
3081         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
3082                 rawSize;
3083         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3084                 CAM_STREAM_TYPE_RAW;
3085         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3086                 CAM_QCOM_FEATURE_NONE;
3087         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3088                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3089                 gCamCapability[mCameraId]->color_arrangement);
3090         mStreamConfigInfo.num_streams++;
3091     }
3092 
3093     if (mHdrPlusRawSrcChannel) {
3094         cam_dimension_t rawSize;
3095         rawSize = getMaxRawSize(mCameraId);
3096         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
3097         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
3098         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
3099         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3100                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3101                 gCamCapability[mCameraId]->color_arrangement);
3102         mStreamConfigInfo.num_streams++;
3103     }
3104 
3105     /* In HFR mode, if video stream is not added, create a dummy channel so that
3106      * ISP can create a batch mode even for preview only case. This channel is
3107      * never 'start'ed (no stream-on), it is only 'initialized'  */
3108     if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
3109             !m_bIsVideo) {
3110         cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3111         setPAAFSupport(dummyFeatureMask,
3112                 CAM_STREAM_TYPE_VIDEO,
3113                 gCamCapability[mCameraId]->color_arrangement);
3114         mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
3115                 mChannelHandle,
3116                 mCameraHandle->ops, captureResultCb,
3117                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
3118                 this,
3119                 &mDummyBatchStream,
3120                 CAM_STREAM_TYPE_VIDEO,
3121                 dummyFeatureMask,
3122                 mMetadataChannel);
3123         if (NULL == mDummyBatchChannel) {
3124             LOGE("creation of mDummyBatchChannel failed."
3125                     "Preview will use non-hfr sensor mode ");
3126         }
3127     }
3128     if (mDummyBatchChannel) {
3129         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3130                 mDummyBatchStream.width;
3131         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3132                 mDummyBatchStream.height;
3133         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3134                 CAM_STREAM_TYPE_VIDEO;
3135         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3136                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3137         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3138                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3139                 gCamCapability[mCameraId]->color_arrangement);
3140         mStreamConfigInfo.num_streams++;
3141     }
3142 
3143     mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3144     mStreamConfigInfo.buffer_info.max_buffers =
3145             m_bIs4KVideo ? 0 :
3146             m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
3147 
3148     /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3149     for (pendingRequestIterator i = mPendingRequestsList.begin();
3150             i != mPendingRequestsList.end();) {
3151         i = erasePendingRequest(i);
3152     }
3153     mPendingFrameDropList.clear();
3154     // Initialize/Reset the pending buffers list
3155     for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3156         req.mPendingBufferList.clear();
3157     }
3158     mPendingBuffersMap.mPendingBuffersInRequest.clear();
3159     mExpectedInflightDuration = 0;
3160     mExpectedFrameDuration = 0;
3161 
3162     mCurJpegMeta.clear();
3163     //Get min frame duration for this streams configuration
3164     deriveMinFrameDuration();
3165 
3166     mFirstPreviewIntentSeen = false;
3167 
3168     // Update state
3169     mState = CONFIGURED;
3170 
3171     mFirstMetadataCallback = true;
3172 
3173     memset(&mLastEISCropInfo, 0, sizeof(mLastEISCropInfo));
3174 
3175     if (streamList->session_parameters != nullptr) {
3176         CameraMetadata meta;
3177         meta = streamList->session_parameters;
3178 
3179         // send an unconfigure to the backend so that the isp
3180         // resources are deallocated
3181         if (!mFirstConfiguration) {
3182             cam_stream_size_info_t stream_config_info;
3183             int32_t hal_version = CAM_HAL_V3;
3184             memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3185             stream_config_info.buffer_info.min_buffers =
3186                     MIN_INFLIGHT_REQUESTS;
3187             stream_config_info.buffer_info.max_buffers =
3188                     m_bIs4KVideo ? 0 :
3189                     m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
3190             clear_metadata_buffer(mParameters);
3191             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3192                     CAM_INTF_PARM_HAL_VERSION, hal_version);
3193             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3194                     CAM_INTF_META_STREAM_INFO, stream_config_info);
3195             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3196                     mParameters);
3197             if (rc < 0) {
3198                 LOGE("set_parms for unconfigure failed");
3199                 pthread_mutex_unlock(&mMutex);
3200                 return rc;
3201             }
3202 
3203         }
3204         /* get eis information for stream configuration */
3205         cam_is_type_t isTypePreview, is_type=IS_TYPE_NONE;
3206         char is_type_value[PROPERTY_VALUE_MAX];
3207         property_get("persist.camera.is_type", is_type_value, "4");
3208         m_ISTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
3209         // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
3210         property_get("persist.camera.is_type_preview", is_type_value, "4");
3211         isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
3212         LOGD("isTypeVideo: %d isTypePreview: %d", m_ISTypeVideo, isTypePreview);
3213 
3214         int32_t hal_version = CAM_HAL_V3;
3215         clear_metadata_buffer(mParameters);
3216         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3217         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, mCaptureIntent);
3218 
3219         if (mFirstConfiguration) {
3220             // configure instant AEC
3221             // Instant AEC is a session based parameter and it is needed only
3222             // once per complete session after open camera.
3223             // i.e. This is set only once for the first capture request, after open camera.
3224             setInstantAEC(meta);
3225         }
3226 
3227         bool setEis = isEISEnabled(meta);
3228         int32_t vsMode;
3229         vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3230         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3231             rc = BAD_VALUE;
3232         }
3233         LOGD("setEis %d", setEis);
3234         bool eis3Supported = false;
3235         size_t count = IS_TYPE_MAX;
3236         count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
3237         for (size_t i = 0; i < count; i++) {
3238             if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
3239                 eis3Supported = true;
3240                 break;
3241             }
3242         }
3243 
3244         //IS type will be 0 unless EIS is supported. If EIS is supported
3245         //it could either be 4 or 5 depending on the stream and video size
3246         for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3247             if (setEis) {
3248                 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
3249                     is_type = isTypePreview;
3250                 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
3251                     if ( (m_ISTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
3252                         LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
3253                         is_type = IS_TYPE_EIS_2_0;
3254                     } else {
3255                         is_type = m_ISTypeVideo;
3256                     }
3257                 } else {
3258                     is_type = IS_TYPE_NONE;
3259                 }
3260                  mStreamConfigInfo.is_type[i] = is_type;
3261             } else {
3262                  mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
3263             }
3264         }
3265 
3266         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3267                 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3268 
3269         char prop[PROPERTY_VALUE_MAX];
3270         //Disable tintless only if the property is set to 0
3271         memset(prop, 0, sizeof(prop));
3272         property_get("persist.camera.tintless.enable", prop, "1");
3273         int32_t tintless_value = atoi(prop);
3274 
3275         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3276                 CAM_INTF_PARM_TINTLESS, tintless_value);
3277 
3278         //Disable CDS for HFR mode or if DIS/EIS is on.
3279         //CDS is a session parameter in the backend/ISP, so need to be set/reset
3280         //after every configure_stream
3281         if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3282                 (m_bIsVideo)) {
3283             int32_t cds = CAM_CDS_MODE_OFF;
3284             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3285                     CAM_INTF_PARM_CDS_MODE, cds))
3286                 LOGE("Failed to disable CDS for HFR mode");
3287 
3288         }
3289 
3290         if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
3291             uint8_t* use_av_timer = NULL;
3292 
3293             if (m_debug_avtimer){
3294                 LOGI(" Enabling AV timer through setprop");
3295                 use_av_timer = &m_debug_avtimer;
3296                 m_bAVTimerEnabled = true;
3297             }
3298             else{
3299                 use_av_timer =
3300                     meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
3301                 if (use_av_timer) {
3302                     m_bAVTimerEnabled = true;
3303                     LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
3304                 }
3305             }
3306 
3307             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
3308                 rc = BAD_VALUE;
3309             }
3310         }
3311 
3312         setMobicat();
3313 
3314         /* Set fps and hfr mode while sending meta stream info so that sensor
3315          * can configure appropriate streaming mode */
3316         mHFRVideoFps = DEFAULT_VIDEO_FPS;
3317         mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
3318         mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3319         if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3320             rc = setHalFpsRange(meta, mParameters);
3321             if (rc == NO_ERROR) {
3322                 int32_t max_fps =
3323                     (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3324                 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
3325                     mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
3326                 }
3327                 /* For HFR, more buffers are dequeued upfront to improve the performance */
3328                 if (mBatchSize) {
3329                     mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3330                     mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3331                 }
3332             }
3333             else {
3334                 LOGE("setHalFpsRange failed");
3335             }
3336         }
3337         memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
3338 
3339         if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
3340             cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
3341                     meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
3342             rc = setVideoHdrMode(mParameters, vhdr);
3343             if (rc != NO_ERROR) {
3344                 LOGE("setVideoHDR is failed");
3345             }
3346         }
3347 
3348         if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
3349             uint8_t sensorModeFullFov =
3350                     meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
3351             LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
3352             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
3353                     sensorModeFullFov)) {
3354                 rc = BAD_VALUE;
3355             }
3356         }
3357         //TODO: validate the arguments, HSV scenemode should have only the
3358         //advertised fps ranges
3359 
3360         /*set the capture intent, hal version, tintless, stream info,
3361          *and DIS enable parameters to the backend*/
3362         LOGD("set_parms META_STREAM_INFO " );
3363         for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3364             LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
3365                     ", Format:%d is_type: %d",
3366                     mStreamConfigInfo.type[i],
3367                     mStreamConfigInfo.stream_sizes[i].width,
3368                     mStreamConfigInfo.stream_sizes[i].height,
3369                     mStreamConfigInfo.postprocess_mask[i],
3370                     mStreamConfigInfo.format[i],
3371                     mStreamConfigInfo.is_type[i]);
3372         }
3373 
3374         rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3375                     mParameters);
3376         if (rc < 0) {
3377             LOGE("set_parms failed for hal version, stream info");
3378         }
3379 
3380     }
3381 
3382     pthread_mutex_unlock(&mMutex);
3383 
3384     return rc;
3385 }
3386 
3387 /*===========================================================================
3388  * FUNCTION   : isEISEnabled
3389  *
3390  * DESCRIPTION: Decide whether EIS should get enabled or not.
3391  *
3392  * PARAMETERS :
3393  *   @meta : request from framework to process
3394  *
3395  * RETURN     : true/false Whether EIS should be enabled
3396  *
3397  *==========================================================================*/
isEISEnabled(const CameraMetadata & meta)3398 bool QCamera3HardwareInterface::isEISEnabled(const CameraMetadata& meta) {
3399     uint8_t fwkVideoStabMode = 0;
3400     if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
3401         fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
3402     }
3403 
3404     // If EIS setprop is enabled then only turn it on for video/preview
3405     return  m_bEisEnable && (m_bIsVideo || fwkVideoStabMode) && m_bEisSupportedSize &&
3406         (m_ISTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
3407 }
3408 
3409 /*===========================================================================
3410  * FUNCTION   : validateCaptureRequest
3411  *
3412  * DESCRIPTION: validate a capture request from camera service
3413  *
3414  * PARAMETERS :
3415  *   @request : request from framework to process
3416  *
3417  * RETURN     :
3418  *
3419  *==========================================================================*/
validateCaptureRequest(camera3_capture_request_t * request,List<InternalRequest> & internallyRequestedStreams)3420 int QCamera3HardwareInterface::validateCaptureRequest(
3421                     camera3_capture_request_t *request,
3422                     List<InternalRequest> &internallyRequestedStreams)
3423 {
3424     ssize_t idx = 0;
3425     const camera3_stream_buffer_t *b;
3426     CameraMetadata meta;
3427 
3428     /* Sanity check the request */
3429     if (request == NULL) {
3430         LOGE("NULL capture request");
3431         return BAD_VALUE;
3432     }
3433 
3434     if ((request->settings == NULL) && (mState == CONFIGURED)) {
3435         /*settings cannot be null for the first request*/
3436         return BAD_VALUE;
3437     }
3438 
3439     uint32_t frameNumber = request->frame_number;
3440     if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3441             && (internallyRequestedStreams.size() == 0)) {
3442         LOGE("Request %d: No output buffers provided!",
3443                 __FUNCTION__, frameNumber);
3444         return BAD_VALUE;
3445     }
3446     if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3447         LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3448                  request->num_output_buffers, MAX_NUM_STREAMS);
3449         return BAD_VALUE;
3450     }
3451     if (request->input_buffer != NULL) {
3452         b = request->input_buffer;
3453         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3454             LOGE("Request %d: Buffer %ld: Status not OK!",
3455                      frameNumber, (long)idx);
3456             return BAD_VALUE;
3457         }
3458         if (b->release_fence != -1) {
3459             LOGE("Request %d: Buffer %ld: Has a release fence!",
3460                      frameNumber, (long)idx);
3461             return BAD_VALUE;
3462         }
3463         if (b->buffer == NULL) {
3464             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3465                      frameNumber, (long)idx);
3466             return BAD_VALUE;
3467         }
3468     }
3469 
3470     // Validate all buffers
3471     b = request->output_buffers;
3472     if (b == NULL) {
3473        return BAD_VALUE;
3474     }
3475     while (idx < (ssize_t)request->num_output_buffers) {
3476         QCamera3ProcessingChannel *channel =
3477                 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3478         if (channel == NULL) {
3479             LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3480                      frameNumber, (long)idx);
3481             return BAD_VALUE;
3482         }
3483         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3484             LOGE("Request %d: Buffer %ld: Status not OK!",
3485                      frameNumber, (long)idx);
3486             return BAD_VALUE;
3487         }
3488         if (b->release_fence != -1) {
3489             LOGE("Request %d: Buffer %ld: Has a release fence!",
3490                      frameNumber, (long)idx);
3491             return BAD_VALUE;
3492         }
3493         if (b->buffer == NULL) {
3494             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3495                      frameNumber, (long)idx);
3496             return BAD_VALUE;
3497         }
3498         if (*(b->buffer) == NULL) {
3499             LOGE("Request %d: Buffer %ld: NULL private handle!",
3500                      frameNumber, (long)idx);
3501             return BAD_VALUE;
3502         }
3503         idx++;
3504         b = request->output_buffers + idx;
3505     }
3506     return NO_ERROR;
3507 }
3508 
3509 /*===========================================================================
3510  * FUNCTION   : deriveMinFrameDuration
3511  *
3512  * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3513  *              on currently configured streams.
3514  *
3515  * PARAMETERS : NONE
3516  *
3517  * RETURN     : NONE
3518  *
3519  *==========================================================================*/
deriveMinFrameDuration()3520 void QCamera3HardwareInterface::deriveMinFrameDuration()
3521 {
3522     int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3523     bool hasRaw = false;
3524 
3525     mMinRawFrameDuration = 0;
3526     mMinJpegFrameDuration = 0;
3527     mMinProcessedFrameDuration = 0;
3528 
3529     maxJpegDim = 0;
3530     maxProcessedDim = 0;
3531     maxRawDim = 0;
3532 
3533     // Figure out maximum jpeg, processed, and raw dimensions
3534     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3535         it != mStreamInfo.end(); it++) {
3536 
3537         // Input stream doesn't have valid stream_type
3538         if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3539             continue;
3540 
3541         int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3542         if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3543             if (dimension > maxJpegDim)
3544                 maxJpegDim = dimension;
3545         } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3546                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3547                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3548             hasRaw = true;
3549             if (dimension > maxRawDim)
3550                 maxRawDim = dimension;
3551         } else {
3552             if (dimension > maxProcessedDim)
3553                 maxProcessedDim = dimension;
3554         }
3555     }
3556 
3557     size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3558             MAX_SIZES_CNT);
3559 
3560     //Assume all jpeg dimensions are in processed dimensions.
3561     if (maxJpegDim > maxProcessedDim)
3562         maxProcessedDim = maxJpegDim;
3563     //Find the smallest raw dimension that is greater or equal to jpeg dimension
3564     if (hasRaw && maxProcessedDim > maxRawDim) {
3565         maxRawDim = INT32_MAX;
3566 
3567         for (size_t i = 0; i < count; i++) {
3568             int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3569                     gCamCapability[mCameraId]->raw_dim[i].height;
3570             if (dimension >= maxProcessedDim && dimension < maxRawDim)
3571                 maxRawDim = dimension;
3572         }
3573     }
3574 
3575     //Find minimum durations for processed, jpeg, and raw
3576     for (size_t i = 0; i < count; i++) {
3577         if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3578                 gCamCapability[mCameraId]->raw_dim[i].height) {
3579             mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3580             break;
3581         }
3582     }
3583     count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3584     for (size_t i = 0; i < count; i++) {
3585         if (maxProcessedDim ==
3586                 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3587                 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3588             mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3589             mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3590             break;
3591         }
3592     }
3593 }
3594 
3595 /*===========================================================================
3596  * FUNCTION   : getMinFrameDuration
3597  *
3598  * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
3599  *              and current request configuration.
3600  *
3601  * PARAMETERS : @request: request sent by the framework
3602  *
3603  * RETURN     : min frame duration for a particular request
3604  *
3605  *==========================================================================*/
getMinFrameDuration(const camera3_capture_request_t * request)3606 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3607 {
3608     bool hasJpegStream = false;
3609     bool hasRawStream = false;
3610     for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3611         const camera3_stream_t *stream = request->output_buffers[i].stream;
3612         if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3613             hasJpegStream = true;
3614         else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3615                 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3616                 stream->format == HAL_PIXEL_FORMAT_RAW16)
3617             hasRawStream = true;
3618     }
3619 
3620     if (!hasJpegStream)
3621         return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3622     else
3623         return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3624 }
3625 
3626 /*===========================================================================
3627  * FUNCTION   : handleBuffersDuringFlushLock
3628  *
3629  * DESCRIPTION: Account for buffers returned from back-end during flush
3630  *              This function is executed while mMutex is held by the caller.
3631  *
3632  * PARAMETERS :
3633  *   @buffer: image buffer for the callback
3634  *
3635  * RETURN     :
3636  *==========================================================================*/
handleBuffersDuringFlushLock(camera3_stream_buffer_t * buffer)3637 void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3638 {
3639     bool buffer_found = false;
3640     for (List<PendingBuffersInRequest>::iterator req =
3641             mPendingBuffersMap.mPendingBuffersInRequest.begin();
3642             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3643         for (List<PendingBufferInfo>::iterator i =
3644                 req->mPendingBufferList.begin();
3645                 i != req->mPendingBufferList.end(); i++) {
3646             if (i->buffer == buffer->buffer) {
3647                 mPendingBuffersMap.numPendingBufsAtFlush--;
3648                 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3649                     buffer->buffer, req->frame_number,
3650                     mPendingBuffersMap.numPendingBufsAtFlush);
3651                 buffer_found = true;
3652                 break;
3653             }
3654         }
3655         if (buffer_found) {
3656             break;
3657         }
3658     }
3659     if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3660         //signal the flush()
3661         LOGD("All buffers returned to HAL. Continue flush");
3662         pthread_cond_signal(&mBuffersCond);
3663     }
3664 }
3665 
3666 /*===========================================================================
3667  * FUNCTION   : handleBatchMetadata
3668  *
3669  * DESCRIPTION: Handles metadata buffer callback in batch mode
3670  *
3671  * PARAMETERS : @metadata_buf: metadata buffer
3672  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3673  *                 the meta buf in this method
3674  *
3675  * RETURN     :
3676  *
3677  *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp of
     * the last frame in the batch. Eg: a batch containing buffers from request
     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
     * multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;
    bool is_metabuf_queued = false;

    // Raw pointers into the metadata buffer; any of these may be NULL when
    // the corresponding entry was not populated by the backend.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    // Invalid metadata is still pushed through the per-frame loop below (with
    // loopCount == 1) so that handleMetadataWithLock can account for it.
    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the batch's last urgent frame number back to the first frame
        // number of the batch via mPendingBatchMap.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        // Same mapping for the regular frame number; the batch entry is
        // removed here since the regular frame number arrives last.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // One loop iteration per frame in the batch; diffs beyond the max
        // batch size indicate a backend accounting problem, so log loudly.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                     urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                     frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                             urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                //Infer timestamp: spread frames evenly at the HFR video frame
                //rate, ending at the batch's reported capture time.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                         last_frame_capture_time, capture_time);
            }
        }
        pthread_mutex_lock(&mMutex);
        // NOTE(review): when a diff is 0 the size_t subtraction below wraps,
        // so the corresponding "last in batch" flag is never true — apparently
        // intentional, since that metadata kind isn't present in this batch.
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
                (i == frameNumDiff-1), /* last metadata in the batch metadata */
                &is_metabuf_queued /* if metabuf isqueued or not */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer: only release it here if the callee did not
     * queue it for later processing. */
    if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
        metadata_buf = NULL;
    }
}
3846 
notifyError(uint32_t frameNumber,camera3_error_msg_code_t errorCode)3847 void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3848         camera3_error_msg_code_t errorCode)
3849 {
3850     camera3_notify_msg_t notify_msg;
3851     memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3852     notify_msg.type = CAMERA3_MSG_ERROR;
3853     notify_msg.message.error.error_code = errorCode;
3854     notify_msg.message.error.error_stream = NULL;
3855     notify_msg.message.error.frame_number = frameNumber;
3856     orchestrateNotify(&notify_msg);
3857 
3858     return;
3859 }
3860 
3861 /*===========================================================================
3862  * FUNCTION   : sendPartialMetadataWithLock
3863  *
3864  * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3865  *
3866  * PARAMETERS : @metadata: metadata buffer
3867  *              @requestIter: The iterator for the pending capture request for
3868  *              which the partial result is being sent
3869  *              @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3870  *                  last urgent metadata in a batch. Always true for non-batch mode
3871  *              @isJumpstartMetadata: Whether this is a partial metadata for
3872  *              jumpstart, i.e. even though it doesn't map to a valid partial
3873  *              frame number, its metadata entries should be kept.
3874  *
3875  * RETURN     :
3876  *
3877  *==========================================================================*/
3878 
void QCamera3HardwareInterface::sendPartialMetadataWithLock(
        metadata_buffer_t *metadata,
        const pendingRequestIterator requestIter,
        bool lastUrgentMetadataInBatch,
        bool isJumpstartMetadata)
{
    camera3_capture_result_t result;
    memset(&result, 0, sizeof(camera3_capture_result_t));

    // Bump the per-request partial counter first; its new value is reported
    // in result.partial_result below.
    requestIter->partial_result_cnt++;

    // Extract 3A metadata
    result.result = translateCbUrgentMetadataToResultMetadata(
            metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
            isJumpstartMetadata);
    // Populate metadata result; partial results never carry output buffers.
    result.frame_number = requestIter->frame_number;
    result.num_output_buffers = 0;
    result.output_buffers = NULL;
    result.partial_result = requestIter->partial_result_cnt;

    {
        // HDR+ client access is guarded by gHdrPlusClientLock; notify it
        // before the result is handed to the framework.
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
            // Notify HDR+ client about the partial metadata.
            gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
            result.partial_result == PARTIAL_RESULT_COUNT);
        }
    }

    orchestrateResult(&result);
    LOGD("urgent frame_number = %u", result.frame_number);
    // The translated metadata is owned here; release it once delivered.
    free_camera_metadata((camera_metadata_t *)result.result);
}
3913 
3914 /*===========================================================================
3915  * FUNCTION   : handleMetadataWithLock
3916  *
3917  * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3918  *
3919  * PARAMETERS : @metadata_buf: metadata buffer
3920  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3921  *                 the meta buf in this method
3922  *              @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3923  *                  last urgent metadata in a batch. Always true for non-batch mode
3924  *              @lastMetadataInBatch: Boolean to indicate whether this is the
3925  *                  last metadata in a batch. Always true for non-batch mode
3926  *              @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3927  *                  buffer is enqueued or not.
3928  *
3929  * RETURN     :
3930  *
3931  *==========================================================================*/
handleMetadataWithLock(mm_camera_super_buf_t * metadata_buf,bool free_and_bufdone_meta_buf,bool lastUrgentMetadataInBatch,bool lastMetadataInBatch,bool * p_is_metabuf_queued)3932 void QCamera3HardwareInterface::handleMetadataWithLock(
3933     mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
3934     bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3935     bool *p_is_metabuf_queued)
3936 {
3937     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
3938     if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3939         //during flush do not send metadata from this thread
3940         LOGD("not sending metadata during flush or when mState is error");
3941         if (free_and_bufdone_meta_buf) {
3942             mMetadataChannel->bufDone(metadata_buf);
3943             free(metadata_buf);
3944         }
3945         return;
3946     }
3947 
3948     //not in flush
3949     metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3950     int32_t frame_number_valid, urgent_frame_number_valid;
3951     uint32_t frame_number, urgent_frame_number;
3952     int64_t capture_time, capture_time_av;
3953     nsecs_t currentSysTime;
3954 
3955     int32_t *p_frame_number_valid =
3956             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3957     uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3958     int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3959     int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
3960     int32_t *p_urgent_frame_number_valid =
3961             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3962     uint32_t *p_urgent_frame_number =
3963             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3964     IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3965             metadata) {
3966         LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3967                  *p_frame_number_valid, *p_frame_number);
3968     }
3969 
3970     camera_metadata_t *resultMetadata = nullptr;
3971 
3972     if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3973             (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3974         LOGE("Invalid metadata");
3975         if (free_and_bufdone_meta_buf) {
3976             mMetadataChannel->bufDone(metadata_buf);
3977             free(metadata_buf);
3978         }
3979         goto done_metadata;
3980     }
3981     frame_number_valid =        *p_frame_number_valid;
3982     frame_number =              *p_frame_number;
3983     capture_time =              *p_capture_time;
3984     capture_time_av =           *p_capture_time_av;
3985     urgent_frame_number_valid = *p_urgent_frame_number_valid;
3986     urgent_frame_number =       *p_urgent_frame_number;
3987     currentSysTime =            systemTime(CLOCK_MONOTONIC);
3988 
3989     if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3990         const int tries = 3;
3991         nsecs_t bestGap, measured;
3992         for (int i = 0; i < tries; ++i) {
3993             const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3994             const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3995             const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3996             const nsecs_t gap = tmono2 - tmono;
3997             if (i == 0 || gap < bestGap) {
3998                 bestGap = gap;
3999                 measured = tbase - ((tmono + tmono2) >> 1);
4000             }
4001         }
4002         capture_time -= measured;
4003     }
4004 
4005     // Detect if buffers from any requests are overdue
4006     for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
4007         int64_t timeout;
4008         {
4009             Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
4010             // If there is a pending HDR+ request, the following requests may be blocked until the
4011             // HDR+ request is done. So allow a longer timeout.
4012             timeout = (mHdrPlusPendingRequests.size() > 0) ?
4013                     MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
4014             timeout = s2ns(timeout);
4015             if (timeout < mExpectedInflightDuration) {
4016                 timeout = mExpectedInflightDuration;
4017             }
4018         }
4019 
4020         if ((currentSysTime - req.timestamp) > timeout) {
4021             for (auto &missed : req.mPendingBufferList) {
4022                 assert(missed.stream->priv);
4023                 if (missed.stream->priv) {
4024                     QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
4025                     assert(ch->mStreams[0]);
4026                     if (ch->mStreams[0]) {
4027                         LOGE("Cancel missing frame = %d, buffer = %p,"
4028                             "stream type = %d, stream format = %d",
4029                             req.frame_number, missed.buffer,
4030                             ch->mStreams[0]->getMyType(), missed.stream->format);
4031                         ch->timeoutFrame(req.frame_number);
4032                     }
4033                 }
4034             }
4035         }
4036     }
4037     //For the very first metadata callback, regardless whether it contains valid
4038     //frame number, send the partial metadata for the jumpstarting requests.
4039     //Note that this has to be done even if the metadata doesn't contain valid
4040     //urgent frame number, because in the case only 1 request is ever submitted
4041     //to HAL, there won't be subsequent valid urgent frame number.
4042     if (mFirstMetadataCallback) {
4043         for (pendingRequestIterator i =
4044                 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
4045             if (i->bUseFirstPartial) {
4046                 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
4047                         true /*isJumpstartMetadata*/);
4048             }
4049         }
4050         mFirstMetadataCallback = false;
4051     }
4052 
4053     //Partial result on process_capture_result for timestamp
4054     if (urgent_frame_number_valid) {
4055         LOGD("valid urgent frame_number = %u", urgent_frame_number);
4056 
4057         //Recieved an urgent Frame Number, handle it
4058         //using partial results
4059         for (pendingRequestIterator i =
4060                 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
4061             LOGD("Iterator Frame = %d urgent frame = %d",
4062                  i->frame_number, urgent_frame_number);
4063 
4064             if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
4065                     (i->partial_result_cnt == 0)) {
4066                 LOGE("Error: HAL missed urgent metadata for frame number %d",
4067                          i->frame_number);
4068                 i->partialResultDropped = true;
4069                 i->partial_result_cnt++;
4070             }
4071 
4072             if (i->frame_number == urgent_frame_number &&
4073                      i->partial_result_cnt == 0) {
4074                 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
4075                         false /*isJumpstartMetadata*/);
4076                 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
4077                     // Instant AEC settled for this frame.
4078                     LOGH("instant AEC settled for frame number %d", urgent_frame_number);
4079                     mInstantAECSettledFrameNumber = urgent_frame_number;
4080                 }
4081                 break;
4082             }
4083         }
4084     }
4085 
4086     if (!frame_number_valid) {
4087         LOGD("Not a valid normal frame number, used as SOF only");
4088         if (free_and_bufdone_meta_buf) {
4089             mMetadataChannel->bufDone(metadata_buf);
4090             free(metadata_buf);
4091         }
4092         goto done_metadata;
4093     }
4094     LOGH("valid frame_number = %u, capture_time = %lld",
4095             frame_number, capture_time);
4096 
4097     handleDepthDataLocked(metadata->depth_data, frame_number,
4098             metadata->is_depth_data_valid);
4099 
4100     // Check whether any stream buffer corresponding to this is dropped or not
4101     // If dropped, then send the ERROR_BUFFER for the corresponding stream
4102     // OR check if instant AEC is enabled, then need to drop frames untill AEC is settled.
4103     for (auto & pendingRequest : mPendingRequestsList) {
4104         if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
4105                     mInstantAECSettledFrameNumber)) {
4106             camera3_notify_msg_t notify_msg = {};
4107             for (auto & buffer : pendingRequest.buffers) {
4108                 bool dropFrame = false;
4109                 QCamera3ProcessingChannel *channel =
4110                         (QCamera3ProcessingChannel *)buffer.stream->priv;
4111                 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4112                 if (p_cam_frame_drop) {
4113                     for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
4114                         if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
4115                             // Got the stream ID for drop frame.
4116                             dropFrame = true;
4117                             break;
4118                         }
4119                     }
4120                 } else {
4121                     // This is instant AEC case.
4122                     // For instant AEC drop the stream untill AEC is settled.
4123                     dropFrame = true;
4124                 }
4125 
4126                 if (dropFrame) {
4127                     // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
4128                     if (p_cam_frame_drop) {
4129                         // Treat msg as error for system buffer drops
4130                         LOGE("Start of reporting error frame#=%u, streamID=%u",
4131                                  pendingRequest.frame_number, streamID);
4132                     } else {
4133                         // For instant AEC, inform frame drop and frame number
4134                         LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
4135                                 "AEC settled frame number = %u",
4136                                 pendingRequest.frame_number, streamID,
4137                                 mInstantAECSettledFrameNumber);
4138                     }
4139                     notify_msg.type = CAMERA3_MSG_ERROR;
4140                     notify_msg.message.error.frame_number = pendingRequest.frame_number;
4141                     notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
4142                     notify_msg.message.error.error_stream = buffer.stream;
4143                     orchestrateNotify(&notify_msg);
4144                     if (p_cam_frame_drop) {
4145                         // Treat msg as error for system buffer drops
4146                         LOGE("End of reporting error frame#=%u, streamID=%u",
4147                                 pendingRequest.frame_number, streamID);
4148                     } else {
4149                         // For instant AEC, inform frame drop and frame number
4150                         LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
4151                                 "AEC settled frame number = %u",
4152                                 pendingRequest.frame_number, streamID,
4153                                 mInstantAECSettledFrameNumber);
4154                     }
4155                     PendingFrameDropInfo PendingFrameDrop;
4156                     PendingFrameDrop.frame_number = pendingRequest.frame_number;
4157                     PendingFrameDrop.stream_ID = streamID;
4158                     // Add the Frame drop info to mPendingFrameDropList
4159                     mPendingFrameDropList.push_back(PendingFrameDrop);
4160                 }
4161             }
4162         }
4163     }
4164 
4165     for (auto & pendingRequest : mPendingRequestsList) {
4166         // Find the pending request with the frame number.
4167         if (pendingRequest.frame_number < frame_number) {
4168             // Workaround for case where shutter is missing due to dropped
4169             // metadata
4170             if (!pendingRequest.hdrplus && (pendingRequest.input_buffer == nullptr)) {
4171                 mShutterDispatcher.markShutterReady(pendingRequest.frame_number, capture_time);
4172             }
4173         } else if (pendingRequest.frame_number == frame_number) {
4174             // Update the sensor timestamp.
4175             pendingRequest.timestamp = capture_time;
4176 
4177 
4178             /* Set the timestamp in display metadata so that clients aware of
4179                private_handle such as VT can use this un-modified timestamps.
4180                Camera framework is unaware of this timestamp and cannot change this */
4181             updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
4182 
4183             // Find channel requiring metadata, meaning internal offline postprocess
4184             // is needed.
4185             //TODO: for now, we don't support two streams requiring metadata at the same time.
4186             // (because we are not making copies, and metadata buffer is not reference counted.
4187             bool internalPproc = false;
4188             for (pendingBufferIterator iter = pendingRequest.buffers.begin();
4189                     iter != pendingRequest.buffers.end(); iter++) {
4190                 if (iter->need_metadata) {
4191                     internalPproc = true;
4192                     QCamera3ProcessingChannel *channel =
4193                             (QCamera3ProcessingChannel *)iter->stream->priv;
4194 
4195                     if (iter->need_crop) {
4196                         QCamera3Stream *stream = channel->getStreamByIndex(0);
4197 
4198                         // Map the EIS crop to respective stream crop and append it.
4199                         IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA,
4200                                 metadata) {
4201                             for (int j = 0; j < crop_data->num_of_streams; j++) {
4202                                 if ((stream != nullptr) &&
4203                                         (stream->getMyServerID() ==
4204                                          crop_data->crop_info[j].stream_id)) {
4205 
4206                                     cam_dimension_t streamDim;
4207                                     if (stream->getFrameDimension(streamDim) != NO_ERROR) {
4208                                         LOGE("%s: Failed obtaining stream dimensions!", __func__);
4209                                         continue;
4210                                     }
4211 
4212                                     mStreamCropMapper.update(
4213                                             gCamCapability[mCameraId]->active_array_size.width,
4214                                             gCamCapability[mCameraId]->active_array_size.height,
4215                                             streamDim.width, streamDim.height);
4216 
4217                                     cam_eis_crop_info_t eisCrop = iter->crop_info;
4218                                     mStreamCropMapper.toSensor(eisCrop.delta_x, eisCrop.delta_y,
4219                                             eisCrop.delta_width, eisCrop.delta_height);
4220 
4221                                     int32_t crop[4] = {
4222                                         crop_data->crop_info[j].crop.left   + eisCrop.delta_x,
4223                                         crop_data->crop_info[j].crop.top    + eisCrop.delta_y,
4224                                         crop_data->crop_info[j].crop.width  - eisCrop.delta_width,
4225                                         crop_data->crop_info[j].crop.height - eisCrop.delta_height
4226                                     };
4227 
4228                                     if (isCropValid(crop[0], crop[1], crop[2], crop[3],
4229                                                 streamDim.width, streamDim.height)) {
4230                                         crop_data->crop_info[j].crop.left   = crop[0];
4231                                         crop_data->crop_info[j].crop.top    = crop[1];
4232                                         crop_data->crop_info[j].crop.width  = crop[2];
4233                                         crop_data->crop_info[j].crop.height = crop[3];
4234                                     } else {
4235                                         LOGE("Invalid EIS compensated crop region");
4236                                     }
4237 
4238                                     break;
4239                                 }
4240                             }
4241                         }
4242                     }
4243 
4244                     channel->queueReprocMetadata(metadata_buf);
4245                     if(p_is_metabuf_queued != NULL) {
4246                         *p_is_metabuf_queued = true;
4247                     }
4248                     iter->need_metadata = false;
4249                     break;
4250                 }
4251             }
4252             for (auto itr = pendingRequest.internalRequestList.begin();
4253                   itr != pendingRequest.internalRequestList.end(); itr++) {
4254                 if (itr->need_metadata) {
4255                     internalPproc = true;
4256                     QCamera3ProcessingChannel *channel =
4257                             (QCamera3ProcessingChannel *)itr->stream->priv;
4258                     channel->queueReprocMetadata(metadata_buf);
4259                     break;
4260                 }
4261             }
4262 
4263             saveExifParams(metadata);
4264 
4265             bool *enableZsl = nullptr;
4266             if (gExposeEnableZslKey) {
4267                 enableZsl = &pendingRequest.enableZsl;
4268             }
4269 
4270             resultMetadata = translateFromHalMetadata(metadata,
4271                     pendingRequest, internalPproc,
4272                     lastMetadataInBatch, enableZsl);
4273 
4274             updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
4275 
4276             if (pendingRequest.blob_request) {
4277                 //Dump tuning metadata if enabled and available
4278                 char prop[PROPERTY_VALUE_MAX];
4279                 memset(prop, 0, sizeof(prop));
4280                 property_get("persist.camera.dumpmetadata", prop, "0");
4281                 int32_t enabled = atoi(prop);
4282                 if (enabled && metadata->is_tuning_params_valid) {
4283                     dumpMetadataToFile(metadata->tuning_params,
4284                            mMetaFrameCount,
4285                            enabled,
4286                            "Snapshot",
4287                            frame_number);
4288                 }
4289             }
4290 
4291             if (!internalPproc) {
4292                 LOGD("couldn't find need_metadata for this metadata");
4293                 // Return metadata buffer
4294                 if (free_and_bufdone_meta_buf) {
4295                     mMetadataChannel->bufDone(metadata_buf);
4296                     free(metadata_buf);
4297                 }
4298             }
4299 
4300             break;
4301         }
4302     }
4303 
4304     mShutterDispatcher.markShutterReady(frame_number, capture_time);
4305 
4306     // Try to send out capture result metadata.
4307     handlePendingResultMetadataWithLock(frame_number,  resultMetadata);
4308     return;
4309 
4310 done_metadata:
4311     for (pendingRequestIterator i = mPendingRequestsList.begin();
4312             i != mPendingRequestsList.end() ;i++) {
4313         i->pipeline_depth++;
4314     }
4315     LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
4316     unblockRequestIfNecessary();
4317 }
4318 
4319 /*===========================================================================
 * FUNCTION   : handleDepthDataLocked
4321  *
4322  * DESCRIPTION: Handles incoming depth data
4323  *
4324  * PARAMETERS : @depthData  : Depth data
4325  *              @frameNumber: Frame number of the incoming depth data
4326  *              @valid      : Valid flag for the incoming data
4327  *
4328  * RETURN     :
4329  *
4330  *==========================================================================*/
void QCamera3HardwareInterface::handleDepthDataLocked(
        const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
    // Frame number of the oldest depth buffer currently queued in the depth
    // channel; filled in by getOldestFrame() below.
    uint32_t currentFrameNumber;
    buffer_handle_t *depthBuffer;

    // No depth stream configured for this session: nothing to return.
    if (nullptr == mDepthChannel) {
        return;
    }

    // Result buffer template; .buffer and .status are set per iteration.
    camera3_stream_buffer_t resultBuffer =
        {.acquire_fence = -1,
         .release_fence = -1,
         .status = CAMERA3_BUFFER_STATUS_OK,
         .buffer = nullptr,
         .stream = mDepthChannel->getStream()};
    // Drain queued depth buffers in order, up to and including frameNumber.
    do {
        depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
        if (nullptr == depthBuffer) {
            // No pending depth buffers left.
            break;
        }

        resultBuffer.buffer = depthBuffer;
        if (currentFrameNumber == frameNumber) {
            if (valid) {
                // Copy the incoming depth payload into the output buffer;
                // report an error status if population fails.
                int32_t rc = mDepthChannel->populateDepthData(depthData,
                        frameNumber);
                if (NO_ERROR != rc) {
                    resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
                } else {
                    resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
                }
            } else {
                // Depth data for this frame was flagged invalid by the source.
                resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            }
        } else if (currentFrameNumber > frameNumber) {
            // Oldest queued buffer is newer than this callback's frame;
            // leave it queued for a future depth callback.
            break;
        } else {
            // An older frame never received its depth data: notify an
            // ERROR_BUFFER and return the stale buffer with error status.
            camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
                    {{currentFrameNumber, mDepthChannel->getStream(),
                            CAMERA3_MSG_ERROR_BUFFER}}};
            orchestrateNotify(&notify_msg);

            LOGE("Depth buffer for frame number: %d is missing "
                    "returning back!", currentFrameNumber);
            resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
        }
        mDepthChannel->unmapBuffer(currentFrameNumber);
        mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
    } while (currentFrameNumber < frameNumber);
}
4381 
4382 /*===========================================================================
4383  * FUNCTION   : notifyErrorFoPendingDepthData
4384  *
4385  * DESCRIPTION: Returns error for any pending depth buffers
4386  *
4387  * PARAMETERS : depthCh - depth channel that needs to get flushed
4388  *
4389  * RETURN     :
4390  *
4391  *==========================================================================*/
notifyErrorFoPendingDepthData(QCamera3DepthChannel * depthCh)4392 void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
4393         QCamera3DepthChannel *depthCh) {
4394     uint32_t currentFrameNumber;
4395     buffer_handle_t *depthBuffer;
4396 
4397     if (nullptr == depthCh) {
4398         return;
4399     }
4400 
4401     camera3_notify_msg_t notify_msg =
4402         {.type = CAMERA3_MSG_ERROR,
4403                 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4404     camera3_stream_buffer_t resultBuffer =
4405         {.acquire_fence = -1,
4406          .release_fence = -1,
4407          .buffer = nullptr,
4408          .stream = depthCh->getStream(),
4409          .status = CAMERA3_BUFFER_STATUS_ERROR};
4410 
4411     while (nullptr !=
4412             (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4413         depthCh->unmapBuffer(currentFrameNumber);
4414 
4415         notify_msg.message.error.frame_number = currentFrameNumber;
4416         orchestrateNotify(&notify_msg);
4417 
4418         mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
4419     };
4420 }
4421 
4422 /*===========================================================================
4423  * FUNCTION   : hdrPlusPerfLock
4424  *
4425  * DESCRIPTION: perf lock for HDR+ using custom intent
4426  *
4427  * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4428  *
4429  * RETURN     : None
4430  *
4431  *==========================================================================*/
hdrPlusPerfLock(mm_camera_super_buf_t * metadata_buf)4432 void QCamera3HardwareInterface::hdrPlusPerfLock(
4433         mm_camera_super_buf_t *metadata_buf)
4434 {
4435     if (NULL == metadata_buf) {
4436         LOGE("metadata_buf is NULL");
4437         return;
4438     }
4439     metadata_buffer_t *metadata =
4440             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4441     int32_t *p_frame_number_valid =
4442             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4443     uint32_t *p_frame_number =
4444             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4445 
4446     if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4447         LOGE("%s: Invalid metadata", __func__);
4448         return;
4449     }
4450 
4451     //acquire perf lock for 2 secs after the last HDR frame is captured
4452     constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
4453     if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4454         if ((p_frame_number != NULL) &&
4455                 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
4456             mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
4457         }
4458     }
4459 }
4460 
4461 /*===========================================================================
4462  * FUNCTION   : handleInputBufferWithLock
4463  *
4464  * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4465  *
4466  * PARAMETERS : @frame_number: frame number of the input buffer
4467  *
4468  * RETURN     :
4469  *
4470  *==========================================================================*/
handleInputBufferWithLock(uint32_t frame_number)4471 void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4472 {
4473     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
4474     pendingRequestIterator i = mPendingRequestsList.begin();
4475     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4476         i++;
4477     }
4478     if (i != mPendingRequestsList.end() && i->input_buffer) {
4479         //found the right request
4480         CameraMetadata settings;
4481         nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4482         if(i->settings) {
4483             settings = i->settings;
4484             if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4485                 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
4486             } else {
4487                 LOGE("No timestamp in input settings! Using current one.");
4488             }
4489         } else {
4490             LOGE("Input settings missing!");
4491         }
4492 
4493         mShutterDispatcher.markShutterReady(frame_number, capture_time);
4494         LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4495                     i->frame_number, capture_time);
4496 
4497         camera3_capture_result result;
4498         memset(&result, 0, sizeof(camera3_capture_result));
4499         result.frame_number = frame_number;
4500         result.result = i->settings;
4501         result.input_buffer = i->input_buffer;
4502         result.partial_result = PARTIAL_RESULT_COUNT;
4503 
4504         orchestrateResult(&result);
4505         LOGD("Input request metadata and input buffer frame_number = %u",
4506                         i->frame_number);
4507         i = erasePendingRequest(i);
4508 
4509         // Dispatch result metadata that may be just unblocked by this reprocess result.
4510         dispatchResultMetadataWithLock(frame_number, REPROCESS, false/*isHdrPlus*/);
4511     } else {
4512         LOGE("Could not find input request for frame number %d", frame_number);
4513     }
4514 }
4515 
4516 /*===========================================================================
4517  * FUNCTION   : handleBufferWithLock
4518  *
4519  * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4520  *
4521  * PARAMETERS : @buffer: image buffer for the callback
4522  *              @frame_number: frame number of the image buffer
4523  *
4524  * RETURN     :
4525  *
4526  *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);

    // A returned BLOB (JPEG) buffer means the snapshot finished; release the
    // perf lock taken for the capture.
    if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
        mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
    }

    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        // Flush in progress: buffers are handled by the flush path instead.
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }

    if (i != mPendingRequestsList.end()) {
        if (i->input_buffer) {
            // For a reprocessing request, try to send out result metadata.
            handlePendingResultMetadataWithLock(frame_number, nullptr);
        }
    }

    // Check if this frame was dropped.
    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
            m != mPendingFrameDropList.end(); m++) {
        QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
        if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
            // This stream/frame pair was recorded as dropped earlier: mark
            // the buffer as an error and remove the bookkeeping entry.
            buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
            LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                     frame_number, streamID);
            m = mPendingFrameDropList.erase(m);
            break;
        }
    }

    // WAR for encoder avtimer timestamp issue
    QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
    if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
        m_bAVTimerEnabled) {
        for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
            if (req->frame_number != frame_number)
                continue;
            if(req->av_timestamp == 0) {
                // No avtimer timestamp was recorded for this request; flag
                // the video buffer as an error.
                buffer->status |= CAMERA3_BUFFER_STATUS_ERROR;
            }
            else {
                // Stash the avtimer timestamp into the buffer's private
                // handle so clients such as VT can read the raw timestamp.
                struct private_handle_t *priv_handle =
                    (struct private_handle_t *) (*(buffer->buffer));
                setMetaData(priv_handle, SET_VT_TIMESTAMP, &(req->av_timestamp));
            }
        }
    }

    // Merge in any error status recorded for this buffer elsewhere.
    buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
    LOGH("result frame_number = %d, buffer = %p",
             frame_number, buffer->buffer);

    mPendingBuffersMap.removeBuf(buffer->buffer);
    mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);

    // First preview buffer out: drop the startup perf locks and switch to
    // the steady-state power hint.
    if (mPreviewStarted == false) {
        QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
        if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");

            mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
            mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
            mPreviewStarted = true;

            // Set power hint for preview
            mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
        }
    }
}
4614 
removeUnrequestedMetadata(pendingRequestIterator requestIter,camera_metadata_t * resultMetadata)4615 void QCamera3HardwareInterface::removeUnrequestedMetadata(pendingRequestIterator requestIter,
4616         camera_metadata_t *resultMetadata) {
4617     CameraMetadata metadata;
4618     metadata.acquire(resultMetadata);
4619 
4620     // Remove len shading map if it's not requested.
4621     if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF &&
4622             metadata.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE) &&
4623             metadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0] !=
4624             ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4625         metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4626         metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4627             &requestIter->requestedLensShadingMapMode, 1);
4628     }
4629 
4630     // Remove face information if it's not requested.
4631     if (requestIter->requestedFaceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_OFF &&
4632             metadata.exists(ANDROID_STATISTICS_FACE_DETECT_MODE) &&
4633             metadata.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0] !=
4634             ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4635         metadata.erase(ANDROID_STATISTICS_FACE_RECTANGLES);
4636         metadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE,
4637                 &requestIter->requestedFaceDetectMode, 1);
4638     }
4639 
4640     requestIter->resultMetadata = metadata.release();
4641 }
4642 
void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
        camera_metadata_t *resultMetadata)
{
    // Find the pending request for this result metadata.
    auto requestIter = mPendingRequestsList.begin();
    while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
        requestIter++;
    }

    if (requestIter == mPendingRequestsList.end()) {
        ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    // Update the result metadata
    requestIter->resultMetadata = resultMetadata;

    // Check what type of request this is.
    RequestType requestType = (requestIter->input_buffer != nullptr) ?  REPROCESS :
            (isStillZsl(*requestIter) ? ZSL : NORMAL);
    if (requestIter->hdrplus) {
        // HDR+ request doesn't have partial results.
        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
    } else if (requestType == REPROCESS) {
        // Reprocessing request result is the same as settings.
        requestIter->resultMetadata = requestIter->settings;
        // Reprocessing request doesn't have partial results.
        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
    } else {
        // Live request: if the urgent (partial) metadata never arrived for
        // this frame, record it as dropped before finalizing the count.
        if ((requestIter->partial_result_cnt == 0) && !requestIter->partialResultDropped) {
            LOGE("Urgent metadata for frame number: %d didn't arrive!", frameNumber);
            requestIter->partialResultDropped = true;
        }
        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
        mPendingLiveRequest--;

        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            // For a live request, send the metadata to HDR+ client.
            // NOTE(review): resultMetadata is dereferenced below; this live
            // path appears to always receive non-null metadata — confirm
            // against callers.
            if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
                gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
                    requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
            }
        }
    }

    // Strip metadata entries (lens shading map, face data) the request asked
    // to have off; not applicable to reprocess results, which echo settings.
    if (requestType != REPROCESS) {
        removeUnrequestedMetadata(requestIter, resultMetadata);
    }

    dispatchResultMetadataWithLock(frameNumber, requestType, requestIter->hdrplus);
}
4695 
/*===========================================================================
 * FUNCTION   : dispatchResultMetadataWithLock
 *
 * DESCRIPTION: Walk the pending request list (ordered by increasing frame
 *              number) and send out, in order, every result metadata that is
 *              ready for requests of the given type. For live requests, also
 *              notify ERROR_RESULT for earlier live requests that never got
 *              metadata. Caller must hold mMutex (per the WithLock suffix
 *              convention used by the sibling functions in this file).
 *
 * PARAMETERS :
 *   @frameNumber : frame number of the result that just became ready
 *   @requestType : type of that request (NORMAL / ZSL / REPROCESS)
 *   @isHdrPlus   : true if the request was serviced by HDR+
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
        RequestType requestType, bool isHdrPlus) {
    // The pending requests are ordered by increasing frame numbers. The result metadata are ready
    // to be sent if all previous pending requests are ready to be sent.
    bool readyToSend = true;

    // Iterate through the pending requests to send out result metadata that are ready. Also if
    // this result metadata belongs to a live request, notify errors for previous live requests
    // that don't have result metadata yet.
    // Note: a live request is either a NORMAL request, or a ZSL non-hdrplus request.
    bool isLiveRequest = requestType != REPROCESS && !isHdrPlus;
    auto iter = mPendingRequestsList.begin();
    while (iter != mPendingRequestsList.end()) {
        // Classify this pending entry so only entries of the dispatched type
        // are considered; entries of other types are left untouched.
        bool thisIsStillZsl = isStillZsl(*iter);
        RequestType thisRequestType = (iter->input_buffer != nullptr) ? REPROCESS :
                (thisIsStillZsl ? ZSL : NORMAL);
        if (thisRequestType != requestType) {
            iter++;
            continue;
        }
        // Check if current pending request is ready. If it's not ready, the following pending
        // requests are also not ready.
        readyToSend &= iter->resultMetadata != nullptr;

        bool thisLiveRequest = !iter->hdrplus && iter->input_buffer == nullptr;
        bool errorResult = false;

        camera3_capture_result_t result = {};
        result.frame_number = iter->frame_number;
        result.result = iter->resultMetadata;
        result.partial_result = iter->partial_result_cnt;

        // If this pending buffer has result metadata, we may be able to send it out.
        if (iter->resultMetadata != nullptr) {
            if (!readyToSend) {
                // If any of the previous pending request is not ready, this pending request is
                // also not ready to send in order to keep shutter callbacks and result metadata
                // in order.
                iter++;
                continue;
            }
            // Notify ERROR_RESULT if partial result was dropped.
            errorResult = iter->partialResultDropped;
        } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
            // If the result metadata belongs to a live request, notify errors for previous pending
            // live requests.
            mPendingLiveRequest--;

            LOGE("Error: HAL missed metadata for frame number %d", iter->frame_number);
            errorResult = true;
        } else {
            // Neither ready nor known-missed: stop considering this entry for now.
            iter++;
            continue;
        }

        if (errorResult) {
            // Check for any buffers that might be stuck in the post-process input queue
            // awaiting metadata and queue an empty meta buffer. The invalid data should
            // fail the offline post-process pass and return any buffers that otherwise
            // will become lost.
            for (auto it = iter->buffers.begin(); it != iter->buffers.end(); it++) {
                if (it->need_metadata) {
                    QCamera3ProcessingChannel *channel =
                        reinterpret_cast<QCamera3ProcessingChannel *> (it->stream->priv);
                    if (channel != nullptr) {
                        LOGE("Dropped result: %d Unblocking any pending pp buffers!",
                                iter->frame_number);
                        channel->queueReprocMetadata(nullptr);
                    }
                    it->need_metadata = false;
                    // Only one stream per request waits on reprocess metadata here.
                    break;
                }
            }

            notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
        } else {
            // Metadata-only result; buffers are dispatched separately.
            result.output_buffers = nullptr;
            result.num_output_buffers = 0;
            orchestrateResult(&result);
        }
        // For reprocessing, result metadata is the same as settings so do not free it here to
        // avoid double free.
        if (result.result != iter->settings) {
            free_camera_metadata((camera_metadata_t *)result.result);
        }
        iter->resultMetadata = nullptr;
        // erasePendingRequest returns the iterator following the erased entry,
        // so the loop continues with the next pending request.
        iter = erasePendingRequest(iter);
    }

    if (isLiveRequest) {
        for (auto &iter : mPendingRequestsList) {
            // Increment pipeline depth for the following pending requests.
            if (iter.frame_number > frameNumber) {
                iter.pipeline_depth++;
            }
        }
    }

    unblockRequestIfNecessary();
}
4796 
4797 /*===========================================================================
4798  * FUNCTION   : unblockRequestIfNecessary
4799  *
4800  * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4801  *              that mMutex is held when this function is called.
4802  *
4803  * PARAMETERS :
4804  *
4805  * RETURN     :
4806  *
4807  *==========================================================================*/
unblockRequestIfNecessary()4808 void QCamera3HardwareInterface::unblockRequestIfNecessary()
4809 {
4810    // Unblock process_capture_request
4811    pthread_cond_signal(&mRequestCond);
4812 }
4813 
4814 /*===========================================================================
4815  * FUNCTION   : isHdrSnapshotRequest
4816  *
4817  * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4818  *
4819  * PARAMETERS : camera3 request structure
4820  *
4821  * RETURN     : boolean decision variable
4822  *
4823  *==========================================================================*/
isHdrSnapshotRequest(camera3_capture_request * request)4824 bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4825 {
4826     if (request == NULL) {
4827         LOGE("Invalid request handle");
4828         assert(0);
4829         return false;
4830     }
4831 
4832     if (!mForceHdrSnapshot) {
4833         CameraMetadata frame_settings;
4834         frame_settings = request->settings;
4835 
4836         if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4837             uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4838             if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4839                 return false;
4840             }
4841         } else {
4842             return false;
4843         }
4844 
4845         if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4846             uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4847             if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4848                 return false;
4849             }
4850         } else {
4851             return false;
4852         }
4853     }
4854 
4855     for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4856         if (request->output_buffers[i].stream->format
4857                 == HAL_PIXEL_FORMAT_BLOB) {
4858             return true;
4859         }
4860     }
4861 
4862     return false;
4863 }
4864 /*===========================================================================
4865  * FUNCTION   : orchestrateRequest
4866  *
4867  * DESCRIPTION: Orchestrates a capture request from camera service
4868  *
4869  * PARAMETERS :
4870  *   @request : request from framework to process
4871  *
4872  * RETURN     : Error status codes
4873  *
4874  *==========================================================================*/
int32_t QCamera3HardwareInterface::orchestrateRequest(
        camera3_capture_request_t *request)
{

    // Save the framework's view of the request so it can be restored after the
    // HDR bracketing sequence rewrites frame number, buffer count and settings.
    uint32_t originalFrameNumber = request->frame_number;
    uint32_t originalOutputCount = request->num_output_buffers;
    const camera_metadata_t *original_settings = request->settings;
    List<InternalRequest> internallyRequestedStreams;
    List<InternalRequest> emptyInternalList;

    if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
        // HDR snapshot: expand the single framework request into a bracketed
        // sequence of internal captures (-2x / 0x / +2x EV). Only the 0x-EV
        // capture below is mapped back to the framework's frame number; all
        // other captures use internally generated numbers and their results
        // are dropped by the orchestration DB.
        LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
        uint32_t internalFrameNumber;
        CameraMetadata modified_meta;


        /* Add Blob channel to list of internally requested streams */
        for (uint32_t i = 0; i < request->num_output_buffers; i++) {
            if (request->output_buffers[i].stream->format
                    == HAL_PIXEL_FORMAT_BLOB) {
                InternalRequest streamRequested;
                streamRequested.meteringOnly = 1;
                streamRequested.need_metadata = 0;
                streamRequested.stream = request->output_buffers[i].stream;
                internallyRequestedStreams.push_back(streamRequested);
            }
        }
        // Internal captures carry no framework output buffers.
        request->num_output_buffers = 0;
        auto itr =  internallyRequestedStreams.begin();

        /* Modify setting to set compensation */
        modified_meta = request->settings;
        int32_t expCompensation = GB_HDR_HALF_STEP_EV;
        uint8_t aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        // NOTE(review): release() transfers ownership of the raw buffer out of
        // modified_meta; each subsequent `modified_meta = modified_settings`
        // appears to copy the buffer, leaving the previous released buffer
        // unfreed until `request->settings` is restored below — verify there
        // is no metadata leak across the three release() calls in this path.
        camera_metadata_t *modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & -2x frame */
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // This is the one capture whose results flow back to the framework:
        // map the original framework frame number to a fresh internal one and
        // request the real output buffers.
        request->num_output_buffers = originalOutputCount;
        _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, emptyInternalList);
        request->num_output_buffers = 0;

        // Rebuild settings at 0 EV compensation for the middle exposure.
        modified_meta = modified_settings;
        expCompensation = 0;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & 0X frame */

        // First a metering-only settle frame on the internal BLOB stream...
        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            LOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // ...then the actual 0x capture, which needs metadata for reprocess.
        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        /* Capture 2X frame*/
        modified_meta = modified_settings;
        expCompensation = GB_HDR_2X_STEP_EV;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        // Same settle-then-capture pattern for the +2x exposure.
        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);


        /* Capture 2X on original streaming config*/
        internallyRequestedStreams.clear();

        /* Restore original settings pointer */
        request->settings = original_settings;
    } else {
        // Non-HDR (or reprocess) path: just translate the framework frame
        // number to an internal one and submit the request as-is.
        uint32_t internalFrameNumber;
        _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        return processCaptureRequest(request, internallyRequestedStreams);
    }

    return NO_ERROR;
}
5010 
5011 /*===========================================================================
5012  * FUNCTION   : orchestrateResult
5013  *
5014  * DESCRIPTION: Orchestrates a capture result to camera service
5015  *
5016  * PARAMETERS :
 *   @result : capture result to forward to camera service
5018  *
5019  * RETURN     :
5020  *
5021  *==========================================================================*/
orchestrateResult(camera3_capture_result_t * result)5022 void QCamera3HardwareInterface::orchestrateResult(
5023                     camera3_capture_result_t *result)
5024 {
5025     uint32_t frameworkFrameNumber;
5026     int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
5027             frameworkFrameNumber);
5028     if (rc != NO_ERROR) {
5029         LOGE("Cannot find translated frameworkFrameNumber");
5030         assert(0);
5031     } else {
5032         if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
5033             LOGD("Internal Request drop the result");
5034         } else {
5035             if (result->result != NULL) {
5036                 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
5037                 camera_metadata_entry_t entry;
5038                 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
5039                 if (ret == OK) {
5040                     int64_t sync_frame_number = frameworkFrameNumber;
5041                     ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
5042                     if (ret != OK)
5043                         LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
5044                 }
5045             }
5046             result->frame_number = frameworkFrameNumber;
5047             LOGH("process_capture_result frame_number %d, result %p, partial %d", result->frame_number, result->result, result->partial_result);
5048             mCallbackOps->process_capture_result(mCallbackOps, result);
5049         }
5050     }
5051 }
5052 
5053 /*===========================================================================
5054  * FUNCTION   : orchestrateNotify
5055  *
5056  * DESCRIPTION: Orchestrates a notify to camera service
5057  *
5058  * PARAMETERS :
 *   @notify_msg : notify message to forward to camera service
5060  *
5061  * RETURN     :
5062  *
5063  *==========================================================================*/
orchestrateNotify(camera3_notify_msg_t * notify_msg)5064 void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
5065 {
5066     uint32_t frameworkFrameNumber;
5067     uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
5068     int32_t rc = NO_ERROR;
5069 
5070     rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
5071                                                           frameworkFrameNumber);
5072 
5073     if (rc != NO_ERROR) {
5074         if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
5075             LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
5076             frameworkFrameNumber = 0;
5077         } else {
5078             LOGE("Cannot find translated frameworkFrameNumber");
5079             assert(0);
5080             return;
5081         }
5082     }
5083 
5084     if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
5085         LOGD("Internal Request drop the notifyCb");
5086     } else {
5087         notify_msg->message.shutter.frame_number = frameworkFrameNumber;
5088         mCallbackOps->notify(mCallbackOps, notify_msg);
5089     }
5090 }
5091 
5092 /*===========================================================================
5093  * FUNCTION   : FrameNumberRegistry
5094  *
5095  * DESCRIPTION: Constructor
5096  *
5097  * PARAMETERS :
5098  *
5099  * RETURN     :
5100  *
5101  *==========================================================================*/
FrameNumberRegistry()5102 FrameNumberRegistry::FrameNumberRegistry()
5103 {
5104     _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
5105 }
5106 
5107 /*===========================================================================
5108  * FUNCTION   : ~FrameNumberRegistry
5109  *
5110  * DESCRIPTION: Destructor
5111  *
5112  * PARAMETERS :
5113  *
5114  * RETURN     :
5115  *
5116  *==========================================================================*/
~FrameNumberRegistry()5117 FrameNumberRegistry::~FrameNumberRegistry()
5118 {
5119 }
5120 
5121 /*===========================================================================
5122  * FUNCTION   : PurgeOldEntriesLocked
5123  *
 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
5125  *
5126  * PARAMETERS :
5127  *
5128  * RETURN     : NONE
5129  *
5130  *==========================================================================*/
purgeOldEntriesLocked()5131 void FrameNumberRegistry::purgeOldEntriesLocked()
5132 {
5133     while (_register.begin() != _register.end()) {
5134         auto itr = _register.begin();
5135         if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
5136             _register.erase(itr);
5137         } else {
5138             return;
5139         }
5140     }
5141 }
5142 
5143 /*===========================================================================
5144  * FUNCTION   : allocStoreInternalFrameNumber
5145  *
5146  * DESCRIPTION: Method to note down a framework request and associate a new
5147  *              internal request number against it
5148  *
5149  * PARAMETERS :
5150  *   @fFrameNumber: Identifier given by framework
5151  *   @internalFN  : Output parameter which will have the newly generated internal
5152  *                  entry
5153  *
5154  * RETURN     : Error code
5155  *
5156  *==========================================================================*/
allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,uint32_t & internalFrameNumber)5157 int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
5158                                                             uint32_t &internalFrameNumber)
5159 {
5160     Mutex::Autolock lock(mRegistryLock);
5161     internalFrameNumber = _nextFreeInternalNumber++;
5162     LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
5163     _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
5164     purgeOldEntriesLocked();
5165     return NO_ERROR;
5166 }
5167 
5168 /*===========================================================================
5169  * FUNCTION   : generateStoreInternalFrameNumber
5170  *
 * DESCRIPTION: Method to generate a new internal request number, independent
 *              of any association with framework requests
5173  *
5174  * PARAMETERS :
 *   @internalFrame#: Output parameter which will have the newly generated internal frame number
5176  *
5177  *
5178  * RETURN     : Error code
5179  *
5180  *==========================================================================*/
generateStoreInternalFrameNumber(uint32_t & internalFrameNumber)5181 int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
5182 {
5183     Mutex::Autolock lock(mRegistryLock);
5184     internalFrameNumber = _nextFreeInternalNumber++;
5185     LOGD("Generated internal framenumber:%d", internalFrameNumber);
5186     _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
5187     purgeOldEntriesLocked();
5188     return NO_ERROR;
5189 }
5190 
5191 /*===========================================================================
5192  * FUNCTION   : getFrameworkFrameNumber
5193  *
5194  * DESCRIPTION: Method to query the framework framenumber given an internal #
5195  *
5196  * PARAMETERS :
5197  *   @internalFrame#: Internal reference
5198  *   @frameworkframenumber: Output parameter holding framework frame entry
5199  *
5200  * RETURN     : Error code
5201  *
5202  *==========================================================================*/
getFrameworkFrameNumber(uint32_t internalFrameNumber,uint32_t & frameworkFrameNumber)5203 int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
5204                                                      uint32_t &frameworkFrameNumber)
5205 {
5206     Mutex::Autolock lock(mRegistryLock);
5207     auto itr = _register.find(internalFrameNumber);
5208     if (itr == _register.end()) {
5209         LOGE("Cannot find internal#: %d", internalFrameNumber);
5210         return -ENOENT;
5211     }
5212 
5213     frameworkFrameNumber = itr->second;
5214     purgeOldEntriesLocked();
5215     return NO_ERROR;
5216 }
5217 
fillPbStreamConfig(pbcamera::StreamConfiguration * config,uint32_t pbStreamId,QCamera3Channel * channel,uint32_t streamIndex)5218 status_t QCamera3HardwareInterface::fillPbStreamConfig(
5219         pbcamera::StreamConfiguration *config, uint32_t pbStreamId, QCamera3Channel *channel,
5220         uint32_t streamIndex) {
5221     if (config == nullptr) {
5222         LOGE("%s: config is null", __FUNCTION__);
5223         return BAD_VALUE;
5224     }
5225 
5226     if (channel == nullptr) {
5227         LOGE("%s: channel is null", __FUNCTION__);
5228         return BAD_VALUE;
5229     }
5230 
5231     QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
5232     if (stream == nullptr) {
5233         LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
5234         return NAME_NOT_FOUND;
5235     }
5236 
5237     const cam_stream_info_t* streamInfo = stream->getStreamInfo();
5238     if (streamInfo == nullptr) {
5239         LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
5240         return NAME_NOT_FOUND;
5241     }
5242 
5243     config->id = pbStreamId;
5244     config->image.width = streamInfo->dim.width;
5245     config->image.height = streamInfo->dim.height;
5246     config->image.padding = 0;
5247 
5248     int bytesPerPixel = 0;
5249 
5250     switch (streamInfo->fmt) {
5251         case CAM_FORMAT_YUV_420_NV21:
5252             config->image.format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
5253             bytesPerPixel = 1;
5254             break;
5255         case CAM_FORMAT_YUV_420_NV12:
5256         case CAM_FORMAT_YUV_420_NV12_VENUS:
5257             config->image.format = HAL_PIXEL_FORMAT_YCbCr_420_SP;
5258             bytesPerPixel = 1;
5259             break;
5260         default:
5261             ALOGE("%s: Stream format %d not supported.", __FUNCTION__, streamInfo->fmt);
5262             return BAD_VALUE;
5263     }
5264 
5265     uint32_t totalPlaneSize = 0;
5266 
5267     // Fill plane information.
5268     for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
5269         pbcamera::PlaneConfiguration plane;
5270         plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride * bytesPerPixel;
5271         plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
5272         config->image.planes.push_back(plane);
5273 
5274         totalPlaneSize += (plane.stride * plane.scanline);
5275     }
5276 
5277     config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
5278     return OK;
5279 }
5280 
5281 /*===========================================================================
5282  * FUNCTION   : processCaptureRequest
5283  *
5284  * DESCRIPTION: process a capture request from camera service
5285  *
5286  * PARAMETERS :
5287  *   @request : request from framework to process
5288  *
5289  * RETURN     :
5290  *
5291  *==========================================================================*/
processCaptureRequest(camera3_capture_request_t * request,List<InternalRequest> & internallyRequestedStreams)5292 int QCamera3HardwareInterface::processCaptureRequest(
5293                     camera3_capture_request_t *request,
5294                     List<InternalRequest> &internallyRequestedStreams)
5295 {
5296     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
5297     int rc = NO_ERROR;
5298     int32_t request_id;
5299     CameraMetadata meta;
5300     bool isVidBufRequested = false;
5301     camera3_stream_buffer_t *pInputBuffer = NULL;
5302 
5303     pthread_mutex_lock(&mMutex);
5304 
5305     // Validate current state
5306     switch (mState) {
5307         case CONFIGURED:
5308         case STARTED:
5309             /* valid state */
5310             break;
5311 
5312         case ERROR:
5313             pthread_mutex_unlock(&mMutex);
5314             handleCameraDeviceError();
5315             return -ENODEV;
5316 
5317         default:
5318             LOGE("Invalid state %d", mState);
5319             pthread_mutex_unlock(&mMutex);
5320             return -ENODEV;
5321     }
5322 
5323     rc = validateCaptureRequest(request, internallyRequestedStreams);
5324     if (rc != NO_ERROR) {
5325         LOGE("incoming request is not valid");
5326         pthread_mutex_unlock(&mMutex);
5327         return rc;
5328     }
5329 
5330     meta = request->settings;
5331 
5332     if (mState == CONFIGURED) {
5333         logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
5334 
5335         // For HFR first capture request, send capture intent, and
5336         // stream on all streams
5337         if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) && mBatchSize) {
5338             int32_t hal_version = CAM_HAL_V3;
5339             uint8_t captureIntent = meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5340             clear_metadata_buffer(mParameters);
5341             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
5342             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
5343             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
5344             if (rc < 0) {
5345                 LOGE("set_parms for for capture intent failed");
5346                 pthread_mutex_unlock(&mMutex);
5347                 return rc;
5348             }
5349         }
5350 
5351         uint8_t nrMode = 0;
5352         if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
5353             nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5354         }
5355 
5356         cam_is_type_t is_type = IS_TYPE_NONE;
5357         bool setEis = isEISEnabled(meta);
5358         cam_sensor_mode_info_t sensorModeInfo = {};
5359         rc = getSensorModeInfo(sensorModeInfo);
5360         if (rc != NO_ERROR) {
5361             LOGE("Failed to get sensor output size");
5362             pthread_mutex_unlock(&mMutex);
5363             goto error_exit;
5364         }
5365 
5366         mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5367                 gCamCapability[mCameraId]->active_array_size.height,
5368                 sensorModeInfo.active_array_size.width,
5369                 sensorModeInfo.active_array_size.height);
5370 
5371         /* Set batchmode before initializing channel. Since registerBuffer
5372          * internally initializes some of the channels, better set batchmode
5373          * even before first register buffer */
5374         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5375             it != mStreamInfo.end(); it++) {
5376             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5377             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5378                     && mBatchSize) {
5379                 rc = channel->setBatchSize(mBatchSize);
5380                 //Disable per frame map unmap for HFR/batchmode case
5381                 rc |= channel->setPerFrameMapUnmap(false);
5382                 if (NO_ERROR != rc) {
5383                     LOGE("Channel init failed %d", rc);
5384                     pthread_mutex_unlock(&mMutex);
5385                     goto error_exit;
5386                 }
5387             }
5388         }
5389 
5390         //First initialize all streams
5391         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5392             it != mStreamInfo.end(); it++) {
5393             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5394 
5395             /* Initial value of NR mode is needed before stream on */
5396             channel->setNRMode(nrMode);
5397             if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5398                ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
5399                setEis) {
5400                 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5401                     if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5402                         is_type = mStreamConfigInfo.is_type[i];
5403                         break;
5404                     }
5405                 }
5406                 rc = channel->initialize(is_type);
5407             } else {
5408                 rc = channel->initialize(IS_TYPE_NONE);
5409             }
5410             if (NO_ERROR != rc) {
5411                 LOGE("Channel initialization failed %d", rc);
5412                 pthread_mutex_unlock(&mMutex);
5413                 goto error_exit;
5414             }
5415         }
5416 
5417         if (mRawDumpChannel) {
5418             rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5419             if (rc != NO_ERROR) {
5420                 LOGE("Error: Raw Dump Channel init failed");
5421                 pthread_mutex_unlock(&mMutex);
5422                 goto error_exit;
5423             }
5424         }
5425         if (mHdrPlusRawSrcChannel) {
5426             rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5427             if (rc != NO_ERROR) {
5428                 LOGE("Error: HDR+ RAW Source Channel init failed");
5429                 pthread_mutex_unlock(&mMutex);
5430                 goto error_exit;
5431             }
5432         }
5433         if (mSupportChannel) {
5434             rc = mSupportChannel->initialize(IS_TYPE_NONE);
5435             if (rc < 0) {
5436                 LOGE("Support channel initialization failed");
5437                 pthread_mutex_unlock(&mMutex);
5438                 goto error_exit;
5439             }
5440         }
5441         if (mAnalysisChannel) {
5442             rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5443             if (rc < 0) {
5444                 LOGE("Analysis channel initialization failed");
5445                 pthread_mutex_unlock(&mMutex);
5446                 goto error_exit;
5447             }
5448         }
5449         if (mDummyBatchChannel) {
5450             rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5451             if (rc < 0) {
5452                 LOGE("mDummyBatchChannel setBatchSize failed");
5453                 pthread_mutex_unlock(&mMutex);
5454                 goto error_exit;
5455             }
5456             rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
5457             if (rc < 0) {
5458                 LOGE("mDummyBatchChannel initialization failed");
5459                 pthread_mutex_unlock(&mMutex);
5460                 goto error_exit;
5461             }
5462         }
5463 
5464         // Set bundle info
5465         rc = setBundleInfo();
5466         if (rc < 0) {
5467             LOGE("setBundleInfo failed %d", rc);
5468             pthread_mutex_unlock(&mMutex);
5469             goto error_exit;
5470         }
5471 
5472         //update settings from app here
5473         if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5474             mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5475             LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5476         }
5477         if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5478             mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5479             LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5480         }
5481         if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5482             mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5483             LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5484 
5485             if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5486                 (mLinkedCameraId != mCameraId) ) {
5487                 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5488                     mLinkedCameraId, mCameraId);
5489                 pthread_mutex_unlock(&mMutex);
5490                 goto error_exit;
5491             }
5492         }
5493 
5494         // add bundle related cameras
5495         LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5496         if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5497             cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5498                     &m_pDualCamCmdPtr->bundle_info;
5499             m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
5500             if (mIsDeviceLinked)
5501                 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5502             else
5503                 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5504 
5505             pthread_mutex_lock(&gCamLock);
5506 
5507             if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5508                 LOGE("Dualcam: Invalid Session Id ");
5509                 pthread_mutex_unlock(&gCamLock);
5510                 pthread_mutex_unlock(&mMutex);
5511                 goto error_exit;
5512             }
5513 
5514             if (mIsMainCamera == 1) {
5515                 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5516                 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
5517                 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
5518                 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
5519                 // related session id should be session id of linked session
5520                 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5521             } else {
5522                 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5523                 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
5524                 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
5525                 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
5526                 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5527             }
5528             m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
5529             pthread_mutex_unlock(&gCamLock);
5530 
5531             rc = mCameraHandle->ops->set_dual_cam_cmd(
5532                     mCameraHandle->camera_handle);
5533             if (rc < 0) {
5534                 LOGE("Dualcam: link failed");
5535                 pthread_mutex_unlock(&mMutex);
5536                 goto error_exit;
5537             }
5538         }
5539         goto no_error;
5540 error_exit:
5541         mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
5542         return rc;
5543 no_error:
5544         mWokenUpByDaemon = false;
5545         mPendingLiveRequest = 0;
5546         mFirstConfiguration = false;
5547     }
5548 
5549     uint32_t frameNumber = request->frame_number;
5550     cam_stream_ID_t streamsArray;
5551 
5552     if (mFlushPerf) {
5553         //we cannot accept any requests during flush
5554         LOGE("process_capture_request cannot proceed during flush");
5555         pthread_mutex_unlock(&mMutex);
5556         return NO_ERROR; //should return an error
5557     }
5558 
5559     if (meta.exists(ANDROID_REQUEST_ID)) {
5560         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5561         mCurrentRequestId = request_id;
5562         LOGD("Received request with id: %d", request_id);
5563     } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5564         LOGE("Unable to find request id field, \
5565                 & no previous id available");
5566         pthread_mutex_unlock(&mMutex);
5567         return NAME_NOT_FOUND;
5568     } else {
5569         LOGD("Re-using old request id");
5570         request_id = mCurrentRequestId;
5571     }
5572 
5573     LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5574                                     request->num_output_buffers,
5575                                     request->input_buffer,
5576                                     frameNumber);
5577     // Acquire all request buffers first
5578     streamsArray.num_streams = 0;
5579     int blob_request = 0;
5580     bool depthRequestPresent = false;
5581     uint32_t snapshotStreamId = 0;
5582     for (size_t i = 0; i < request->num_output_buffers; i++) {
5583         const camera3_stream_buffer_t& output = request->output_buffers[i];
5584         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5585 
5586         if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5587                 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
5588             //FIXME??:Call function to store local copy of jpeg data for encode params.
5589             blob_request = 1;
5590             snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5591         }
5592 
5593         if (output.acquire_fence != -1) {
5594            rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5595            close(output.acquire_fence);
5596            if (rc != OK) {
5597               LOGE("sync wait failed %d", rc);
5598               pthread_mutex_unlock(&mMutex);
5599               return rc;
5600            }
5601         }
5602 
5603         if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5604                 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
5605             depthRequestPresent = true;
5606             continue;
5607         }
5608 
5609         streamsArray.stream_request[streamsArray.num_streams++].streamID =
5610             channel->getStreamID(channel->getStreamTypeMask());
5611 
5612         if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5613             isVidBufRequested = true;
5614         }
5615     }
5616 
5617     //FIXME: Add checks to ensure no dups in validateCaptureRequest
5618     for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5619           itr++) {
5620         QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5621         streamsArray.stream_request[streamsArray.num_streams++].streamID =
5622             channel->getStreamID(channel->getStreamTypeMask());
5623 
5624         if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5625             isVidBufRequested = true;
5626         }
5627     }
5628 
5629     if (blob_request) {
5630         ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
5631         mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
5632     }
5633     if (blob_request && mRawDumpChannel) {
5634         LOGD("Trigger Raw based on blob request if Raw dump is enabled");
5635         streamsArray.stream_request[streamsArray.num_streams].streamID =
5636             mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
5637         streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5638     }
5639 
5640     {
5641         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5642         // Request a RAW buffer if
5643         //  1. mHdrPlusRawSrcChannel is valid.
5644         //  2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5645         //  3. There is no pending HDR+ request.
5646         if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5647                 mHdrPlusPendingRequests.size() == 0) {
5648             streamsArray.stream_request[streamsArray.num_streams].streamID =
5649                 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5650             streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5651         }
5652     }
5653 
5654     //extract capture intent
5655     if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5656         mCaptureIntent =
5657                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5658     }
5659 
5660     if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5661         mCacMode =
5662                 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5663     }
5664 
5665     uint8_t requestedLensShadingMapMode;
5666     // Get the shading map mode.
5667     if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5668         mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5669                 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5670     } else {
5671         requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5672     }
5673 
5674     if (meta.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
5675         mLastRequestedFaceDetectMode =
5676                 meta.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
5677     }
5678 
5679     if (meta.exists(ANDROID_STATISTICS_OIS_DATA_MODE)) {
5680         mLastRequestedOisDataMode =
5681                 meta.find(ANDROID_STATISTICS_OIS_DATA_MODE).data.u8[0];
5682     }
5683 
5684     bool hdrPlusRequest = false;
5685     HdrPlusPendingRequest pendingHdrPlusRequest = {};
5686 
5687     {
5688         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5689         // If this request has a still capture intent, try to submit an HDR+ request.
5690         if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5691                 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5692             hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5693         }
5694     }
5695 
5696     if (hdrPlusRequest) {
5697         // For a HDR+ request, just set the frame parameters.
5698         rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5699         if (rc < 0) {
5700             LOGE("fail to set frame parameters");
5701             pthread_mutex_unlock(&mMutex);
5702             return rc;
5703         }
5704     } else if(request->input_buffer == NULL) {
5705         /* Parse the settings:
5706          * - For every request in NORMAL MODE
5707          * - For every request in HFR mode during preview only case
5708          * - For first request of every batch in HFR mode during video
5709          * recording. In batchmode the same settings except frame number is
5710          * repeated in each request of the batch.
5711          */
5712         if (!mBatchSize ||
5713            (mBatchSize && !isVidBufRequested) ||
5714            (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
5715             rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5716             if (rc < 0) {
5717                 LOGE("fail to set frame parameters");
5718                 pthread_mutex_unlock(&mMutex);
5719                 return rc;
5720             }
5721 
5722             {
5723                 // If HDR+ mode is enabled, override the following modes so the necessary metadata
5724                 // will be included in the result metadata sent to Easel HDR+.
5725                 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5726                 if (mHdrPlusModeEnabled) {
5727                     ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5728                         ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5729                     ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STATS_FACEDETECT_MODE,
5730                         ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
5731                 }
5732             }
5733         }
5734         /* For batchMode HFR, setFrameParameters is not called for every
5735          * request. But only frame number of the latest request is parsed.
5736          * Keep track of first and last frame numbers in a batch so that
5737          * metadata for the frame numbers of batch can be duplicated in
5738          * handleBatchMetadata */
5739         if (mBatchSize) {
5740             if (!mToBeQueuedVidBufs) {
5741                 //start of the batch
5742                 mFirstFrameNumberInBatch = request->frame_number;
5743             }
5744             if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5745                 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5746                 LOGE("Failed to set the frame number in the parameters");
5747                 pthread_mutex_unlock(&mMutex);
5748                 return BAD_VALUE;
5749             }
5750         }
5751         if (mNeedSensorRestart) {
5752             /* Unlock the mutex as restartSensor waits on the channels to be
5753              * stopped, which in turn calls stream callback functions -
5754              * handleBufferWithLock and handleMetadataWithLock */
5755             pthread_mutex_unlock(&mMutex);
5756             rc = dynamicUpdateMetaStreamInfo();
5757             if (rc != NO_ERROR) {
5758                 LOGE("Restarting the sensor failed");
5759                 return BAD_VALUE;
5760             }
5761             mNeedSensorRestart = false;
5762             pthread_mutex_lock(&mMutex);
5763         }
5764         if(mResetInstantAEC) {
5765             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5766                     CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5767             mResetInstantAEC = false;
5768         }
5769     } else {
5770         if (request->input_buffer->acquire_fence != -1) {
5771            rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5772            close(request->input_buffer->acquire_fence);
5773            if (rc != OK) {
5774               LOGE("input buffer sync wait failed %d", rc);
5775               pthread_mutex_unlock(&mMutex);
5776               return rc;
5777            }
5778         }
5779     }
5780 
5781     if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5782         mLastCustIntentFrmNum = frameNumber;
5783     }
5784     /* Update pending request list and pending buffers map */
5785     PendingRequestInfo pendingRequest = {};
5786     pendingRequestIterator latestRequest;
5787     pendingRequest.frame_number = frameNumber;
5788     pendingRequest.num_buffers = depthRequestPresent ?
5789             (request->num_output_buffers - 1 ) : request->num_output_buffers;
5790     pendingRequest.request_id = request_id;
5791     pendingRequest.blob_request = blob_request;
5792     pendingRequest.timestamp = 0;
5793     pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
5794     pendingRequest.requestedFaceDetectMode = mLastRequestedFaceDetectMode;
5795     pendingRequest.requestedOisDataMode = mLastRequestedOisDataMode;
5796     if (request->input_buffer) {
5797         pendingRequest.input_buffer =
5798                 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5799         *(pendingRequest.input_buffer) = *(request->input_buffer);
5800         pInputBuffer = pendingRequest.input_buffer;
5801     } else {
5802        pendingRequest.input_buffer = NULL;
5803        pInputBuffer = NULL;
5804     }
5805     pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
5806 
5807     pendingRequest.pipeline_depth = 0;
5808     pendingRequest.partial_result_cnt = 0;
5809     extractJpegMetadata(mCurJpegMeta, request);
5810     pendingRequest.jpegMetadata = mCurJpegMeta;
5811     pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5812     pendingRequest.capture_intent = mCaptureIntent;
5813     if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5814         pendingRequest.hybrid_ae_enable =
5815                 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5816     }
5817 
5818     if (meta.exists(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE)) {
5819         pendingRequest.motion_detection_enable =
5820                 meta.find(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE).data.u8[0];
5821     }
5822 
5823     /* DevCamDebug metadata processCaptureRequest */
5824     if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5825         mDevCamDebugMetaEnable =
5826                 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5827     }
5828     pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5829     /* DevCamDebug metadata end */
5830 
5831     //extract CAC info
5832     if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5833         mCacMode =
5834                 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5835     }
5836     pendingRequest.fwkCacMode = mCacMode;
5837     pendingRequest.hdrplus = hdrPlusRequest;
5838     // We need to account for several dropped frames initially on sensor side.
5839     pendingRequest.expectedFrameDuration = (mState == CONFIGURED) ? (4 * mExpectedFrameDuration) :
5840         mExpectedFrameDuration;
5841     mExpectedInflightDuration += pendingRequest.expectedFrameDuration;
5842 
5843     // extract enableZsl info
5844     if (gExposeEnableZslKey) {
5845         if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5846             pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5847             mZslEnabled = pendingRequest.enableZsl;
5848         } else {
5849             pendingRequest.enableZsl = mZslEnabled;
5850         }
5851     }
5852 
5853     PendingBuffersInRequest bufsForCurRequest;
5854     bufsForCurRequest.frame_number = frameNumber;
5855     // Mark current timestamp for the new request
5856     bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
5857     bufsForCurRequest.av_timestamp = 0;
5858     bufsForCurRequest.hdrplus = hdrPlusRequest;
5859 
5860     if (hdrPlusRequest) {
5861         // Save settings for this request.
5862         pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5863         memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5864 
5865         // Add to pending HDR+ request queue.
5866         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5867         mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5868 
5869         ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5870     }
5871 
5872     for (size_t i = 0; i < request->num_output_buffers; i++) {
5873         if ((request->output_buffers[i].stream->data_space ==
5874                 HAL_DATASPACE_DEPTH) &&
5875                 (HAL_PIXEL_FORMAT_BLOB ==
5876                         request->output_buffers[i].stream->format)) {
5877             continue;
5878         }
5879         RequestedBufferInfo requestedBuf;
5880         memset(&requestedBuf, 0, sizeof(requestedBuf));
5881         requestedBuf.stream = request->output_buffers[i].stream;
5882         requestedBuf.buffer = NULL;
5883         pendingRequest.buffers.push_back(requestedBuf);
5884 
5885         // Add to buffer handle the pending buffers list
5886         PendingBufferInfo bufferInfo;
5887         bufferInfo.buffer = request->output_buffers[i].buffer;
5888         bufferInfo.stream = request->output_buffers[i].stream;
5889         bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5890         QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5891         LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5892             frameNumber, bufferInfo.buffer,
5893             channel->getStreamTypeMask(), bufferInfo.stream->format);
5894     }
5895     // Add this request packet into mPendingBuffersMap
5896     mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5897     LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5898         mPendingBuffersMap.get_num_overall_buffers());
5899 
5900     latestRequest = mPendingRequestsList.insert(
5901             mPendingRequestsList.end(), pendingRequest);
5902 
5903     // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5904     // for the frame number.
5905     mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr,
5906             isStillZsl(pendingRequest));
5907     for (size_t i = 0; i < request->num_output_buffers; i++) {
5908         mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5909     }
5910 
5911     if(mFlush) {
5912         LOGI("mFlush is true");
5913         pthread_mutex_unlock(&mMutex);
5914         return NO_ERROR;
5915     }
5916 
5917     // If this is not an HDR+ request, send the request to metadata and each output buffer's
5918     // channel.
5919     if (!hdrPlusRequest) {
5920         int indexUsed;
5921         // Notify metadata channel we receive a request
5922         mMetadataChannel->request(NULL, frameNumber, indexUsed);
5923 
5924         if(request->input_buffer != NULL){
5925             LOGD("Input request, frame_number %d", frameNumber);
5926             rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5927             if (NO_ERROR != rc) {
5928                 LOGE("fail to set reproc parameters");
5929                 pthread_mutex_unlock(&mMutex);
5930                 return rc;
5931             }
5932         }
5933 
5934         // Call request on other streams
5935         uint32_t streams_need_metadata = 0;
5936         pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5937         for (size_t i = 0; i < request->num_output_buffers; i++) {
5938             const camera3_stream_buffer_t& output = request->output_buffers[i];
5939             QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5940 
5941             if (channel == NULL) {
5942                 LOGW("invalid channel pointer for stream");
5943                 continue;
5944             }
5945 
5946             if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5947                 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5948                           output.buffer, request->input_buffer, frameNumber);
5949                 if(request->input_buffer != NULL){
5950                     rc = channel->request(output.buffer, frameNumber,
5951                             pInputBuffer, &mReprocMeta, indexUsed, false, false);
5952                     if (rc < 0) {
5953                         LOGE("Fail to request on picture channel");
5954                         pthread_mutex_unlock(&mMutex);
5955                         return rc;
5956                     }
5957                 } else {
5958                     if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5959                         assert(NULL != mDepthChannel);
5960                         assert(mDepthChannel == output.stream->priv);
5961 
5962                         rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5963                         if (rc < 0) {
5964                             LOGE("Fail to map on depth buffer");
5965                             pthread_mutex_unlock(&mMutex);
5966                             return rc;
5967                         }
5968                         continue;
5969                     } else {
5970                         LOGD("snapshot request with buffer %p, frame_number %d",
5971                                  output.buffer, frameNumber);
5972                         if (!request->settings) {
5973                             rc = channel->request(output.buffer, frameNumber,
5974                                     NULL, mPrevParameters, indexUsed);
5975                         } else {
5976                             rc = channel->request(output.buffer, frameNumber,
5977                                     NULL, mParameters, indexUsed);
5978                         }
5979                         if (rc < 0) {
5980                             LOGE("Fail to request on picture channel");
5981                             pthread_mutex_unlock(&mMutex);
5982                             return rc;
5983                         }
5984 
5985                         uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5986                         uint32_t j = 0;
5987                         for (j = 0; j < streamsArray.num_streams; j++) {
5988                             if (streamsArray.stream_request[j].streamID == streamId) {
5989                                 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5990                                     streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5991                                 else
5992                                     streamsArray.stream_request[j].buf_index = indexUsed;
5993                                 break;
5994                             }
5995                         }
5996                         if (j == streamsArray.num_streams) {
5997                             LOGE("Did not find matching stream to update index");
5998                             assert(0);
5999                         }
6000 
6001                         pendingBufferIter->need_metadata = true;
6002 
6003                         if (isEISCropInSnapshotNeeded(meta)) {
6004                             pendingBufferIter->need_crop = true;
6005                             pendingBufferIter->crop_info = mLastEISCropInfo;
6006                         }
6007 
6008                         streams_need_metadata++;
6009                     }
6010                 }
6011             } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
6012                     output.stream->format == HAL_PIXEL_FORMAT_Y8) {
6013                 bool needMetadata = false;
6014                 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
6015                 rc = yuvChannel->request(output.buffer, frameNumber,
6016                         pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
6017                         needMetadata, indexUsed, false, false);
6018                 if (rc < 0) {
6019                     LOGE("Fail to request on YUV channel");
6020                     pthread_mutex_unlock(&mMutex);
6021                     return rc;
6022                 }
6023 
6024                 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
6025                 uint32_t j = 0;
6026                 for (j = 0; j < streamsArray.num_streams; j++) {
6027                     if (streamsArray.stream_request[j].streamID == streamId) {
6028                         if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
6029                             streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
6030                         else
6031                             streamsArray.stream_request[j].buf_index = indexUsed;
6032                         break;
6033                     }
6034                 }
6035                 if (j == streamsArray.num_streams) {
6036                     LOGE("Did not find matching stream to update index");
6037                     assert(0);
6038                 }
6039 
6040                 pendingBufferIter->need_metadata = needMetadata;
6041                 if (needMetadata)
6042                     streams_need_metadata += 1;
6043                 LOGD("calling YUV channel request, need_metadata is %d",
6044                          needMetadata);
6045             } else {
6046                 LOGD("request with buffer %p, frame_number %d",
6047                       output.buffer, frameNumber);
6048 
6049                 rc = channel->request(output.buffer, frameNumber, indexUsed);
6050 
6051                 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
6052                 uint32_t j = 0;
6053                 for (j = 0; j < streamsArray.num_streams; j++) {
6054                     if (streamsArray.stream_request[j].streamID == streamId) {
6055                         if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
6056                             streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
6057                         else
6058                             streamsArray.stream_request[j].buf_index = indexUsed;
6059                         break;
6060                     }
6061                 }
6062                 if (j == streamsArray.num_streams) {
6063                     LOGE("Did not find matching stream to update index");
6064                     assert(0);
6065                 }
6066 
6067                 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
6068                         && mBatchSize) {
6069                     mToBeQueuedVidBufs++;
6070                     if (mToBeQueuedVidBufs == mBatchSize) {
6071                         channel->queueBatchBuf();
6072                     }
6073                 }
6074                 if (rc < 0) {
6075                     LOGE("request failed");
6076                     pthread_mutex_unlock(&mMutex);
6077                     return rc;
6078                 }
6079             }
6080             pendingBufferIter++;
6081         }
6082 
6083         for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
6084               itr++) {
6085             QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
6086 
6087             if (channel == NULL) {
6088                 LOGE("invalid channel pointer for stream");
6089                 assert(0);
6090                 pthread_mutex_unlock(&mMutex);
6091                 return BAD_VALUE;
6092             }
6093 
6094             InternalRequest requestedStream;
6095             requestedStream = (*itr);
6096 
6097 
6098             if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
6099                 LOGD("snapshot request internally input buffer %p, frame_number %d",
6100                           request->input_buffer, frameNumber);
6101                 if(request->input_buffer != NULL){
6102                     rc = channel->request(NULL, frameNumber,
6103                             pInputBuffer, &mReprocMeta, indexUsed, true,
6104                             requestedStream.meteringOnly);
6105                     if (rc < 0) {
6106                         LOGE("Fail to request on picture channel");
6107                         pthread_mutex_unlock(&mMutex);
6108                         return rc;
6109                     }
6110                 } else {
6111                     LOGD("snapshot request with frame_number %d", frameNumber);
6112                     if (!request->settings) {
6113                         rc = channel->request(NULL, frameNumber,
6114                                 NULL, mPrevParameters, indexUsed, true,
6115                                 requestedStream.meteringOnly);
6116                     } else {
6117                         rc = channel->request(NULL, frameNumber,
6118                                 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
6119                     }
6120                     if (rc < 0) {
6121                         LOGE("Fail to request on picture channel");
6122                         pthread_mutex_unlock(&mMutex);
6123                         return rc;
6124                     }
6125 
6126                     if ((*itr).meteringOnly != 1) {
6127                         requestedStream.need_metadata = 1;
6128                         streams_need_metadata++;
6129                     }
6130                 }
6131 
6132                 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
6133                 uint32_t j = 0;
6134                 for (j = 0; j < streamsArray.num_streams; j++) {
6135                     if (streamsArray.stream_request[j].streamID == streamId) {
6136                       if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
6137                           streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
6138                       else
6139                           streamsArray.stream_request[j].buf_index = indexUsed;
6140                         break;
6141                     }
6142                 }
6143                 if (j == streamsArray.num_streams) {
6144                     LOGE("Did not find matching stream to update index");
6145                     assert(0);
6146                 }
6147 
6148             } else {
6149                 LOGE("Internal requests not supported on this stream type");
6150                 assert(0);
6151                 pthread_mutex_unlock(&mMutex);
6152                 return INVALID_OPERATION;
6153             }
6154             latestRequest->internalRequestList.push_back(requestedStream);
6155         }
6156 
6157         //If 2 streams have need_metadata set to true, fail the request, unless
6158         //we copy/reference count the metadata buffer
6159         if (streams_need_metadata > 1) {
6160             LOGE("not supporting request in which two streams requires"
6161                     " 2 HAL metadata for reprocessing");
6162             pthread_mutex_unlock(&mMutex);
6163             return -EINVAL;
6164         }
6165 
6166         cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
6167                 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
6168         if (depthRequestPresent && mDepthChannel) {
6169             if (request->settings) {
6170                 camera_metadata_ro_entry entry;
6171                 if (find_camera_metadata_ro_entry(request->settings,
6172                         NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
6173                     if (entry.data.u8[0]) {
6174                         pdafEnable = CAM_PD_DATA_ENABLED;
6175                     } else {
6176                         pdafEnable = CAM_PD_DATA_SKIP;
6177                     }
6178                     mDepthCloudMode = pdafEnable;
6179                 } else {
6180                     pdafEnable = mDepthCloudMode;
6181                 }
6182             } else {
6183                 pdafEnable = mDepthCloudMode;
6184             }
6185         }
6186 
6187         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
6188                 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
6189             LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
6190             pthread_mutex_unlock(&mMutex);
6191             return BAD_VALUE;
6192         }
6193 
6194         if (request->input_buffer == NULL) {
6195             /* Set the parameters to backend:
6196              * - For every request in NORMAL MODE
6197              * - For every request in HFR mode during preview only case
6198              * - Once every batch in HFR mode during video recording
6199              */
6200             if (!mBatchSize ||
6201                (mBatchSize && !isVidBufRequested) ||
6202                (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
6203                 LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
6204                          mBatchSize, isVidBufRequested,
6205                         mToBeQueuedVidBufs);
6206 
6207                 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
6208                     for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
6209                         uint32_t m = 0;
6210                         for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
6211                             if (streamsArray.stream_request[k].streamID ==
6212                                     mBatchedStreamsArray.stream_request[m].streamID)
6213                                 break;
6214                             }
6215                             if (m == mBatchedStreamsArray.num_streams) {
6216                                 mBatchedStreamsArray.stream_request\
6217                                     [mBatchedStreamsArray.num_streams].streamID =
6218                                     streamsArray.stream_request[k].streamID;
6219                                 mBatchedStreamsArray.stream_request\
6220                                     [mBatchedStreamsArray.num_streams].buf_index =
6221                                     streamsArray.stream_request[k].buf_index;
6222                                 mBatchedStreamsArray.num_streams =
6223                                     mBatchedStreamsArray.num_streams + 1;
6224                             }
6225                     }
6226                     streamsArray = mBatchedStreamsArray;
6227                 }
6228                 /* Update stream id of all the requested buffers */
6229                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
6230                         streamsArray)) {
6231                     LOGE("Failed to set stream type mask in the parameters");
6232                     pthread_mutex_unlock(&mMutex);
6233                     return BAD_VALUE;
6234                 }
6235 
6236                 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
6237                         mParameters);
6238                 if (rc < 0) {
6239                     LOGE("set_parms failed");
6240                 }
6241                 /* reset to zero coz, the batch is queued */
6242                 mToBeQueuedVidBufs = 0;
6243                 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
6244                 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
6245             } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
6246                 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
6247                     uint32_t m = 0;
6248                     for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
6249                         if (streamsArray.stream_request[k].streamID ==
6250                                 mBatchedStreamsArray.stream_request[m].streamID)
6251                             break;
6252                     }
6253                     if (m == mBatchedStreamsArray.num_streams) {
6254                         mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
6255                                 streamID = streamsArray.stream_request[k].streamID;
6256                         mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
6257                                 buf_index = streamsArray.stream_request[k].buf_index;
6258                         mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
6259                     }
6260                 }
6261             }
6262             mPendingLiveRequest++;
6263 
6264             // Start all streams after the first setting is sent, so that the
6265             // setting can be applied sooner: (0 + apply_delay)th frame.
6266             if (mState == CONFIGURED && mChannelHandle) {
6267                 //Then start them.
6268                 LOGH("Start META Channel");
6269                 rc = mMetadataChannel->start();
6270                 if (rc < 0) {
6271                     LOGE("META channel start failed");
6272                     pthread_mutex_unlock(&mMutex);
6273                     return rc;
6274                 }
6275 
6276                 if (mAnalysisChannel) {
6277                     rc = mAnalysisChannel->start();
6278                     if (rc < 0) {
6279                         LOGE("Analysis channel start failed");
6280                         mMetadataChannel->stop();
6281                         pthread_mutex_unlock(&mMutex);
6282                         return rc;
6283                     }
6284                 }
6285 
6286                 if (mSupportChannel) {
6287                     rc = mSupportChannel->start();
6288                     if (rc < 0) {
6289                         LOGE("Support channel start failed");
6290                         mMetadataChannel->stop();
6291                         /* Although support and analysis are mutually exclusive today
6292                            adding it in anycase for future proofing */
6293                         if (mAnalysisChannel) {
6294                             mAnalysisChannel->stop();
6295                         }
6296                         pthread_mutex_unlock(&mMutex);
6297                         return rc;
6298                     }
6299                 }
6300                 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6301                         it != mStreamInfo.end(); it++) {
6302                     QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
6303                     LOGH("Start Processing Channel mask=%d",
6304                             channel->getStreamTypeMask());
6305                     rc = channel->start();
6306                     if (rc < 0) {
6307                         LOGE("channel start failed");
6308                         pthread_mutex_unlock(&mMutex);
6309                         return rc;
6310                     }
6311                 }
6312 
6313                 if (mRawDumpChannel) {
6314                     LOGD("Starting raw dump stream");
6315                     rc = mRawDumpChannel->start();
6316                     if (rc != NO_ERROR) {
6317                         LOGE("Error Starting Raw Dump Channel");
6318                         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6319                               it != mStreamInfo.end(); it++) {
6320                             QCamera3Channel *channel =
6321                                 (QCamera3Channel *)(*it)->stream->priv;
6322                             LOGH("Stopping Processing Channel mask=%d",
6323                                 channel->getStreamTypeMask());
6324                             channel->stop();
6325                         }
6326                         if (mSupportChannel)
6327                             mSupportChannel->stop();
6328                         if (mAnalysisChannel) {
6329                             mAnalysisChannel->stop();
6330                         }
6331                         mMetadataChannel->stop();
6332                         pthread_mutex_unlock(&mMutex);
6333                         return rc;
6334                     }
6335                 }
6336 
6337                 // Configure modules for stream on.
6338                 rc = startChannelLocked();
6339                 if (rc != NO_ERROR) {
6340                     LOGE("startChannelLocked failed %d", rc);
6341                     pthread_mutex_unlock(&mMutex);
6342                     return rc;
6343                 }
6344             }
6345         }
6346     }
6347 
6348     // Enable HDR+ mode for the first PREVIEW_INTENT request that doesn't disable HDR+.
6349     {
6350         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6351         if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
6352                 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6353                 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6354                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6355                 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW &&
6356                 meta.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
6357                 meta.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 0) {
6358 
6359             if (isSessionHdrPlusModeCompatible()) {
6360                 rc = enableHdrPlusModeLocked();
6361                 if (rc != OK) {
6362                     LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6363                     pthread_mutex_unlock(&mMutex);
6364                     return rc;
6365                 }
6366             }
6367 
6368             mFirstPreviewIntentSeen = true;
6369         }
6370     }
6371 
6372     LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6373 
6374     mState = STARTED;
6375     // Added a timed condition wait
6376     struct timespec ts;
6377     uint8_t isValidTimeout = 1;
6378     rc = clock_gettime(CLOCK_MONOTONIC, &ts);
6379     if (rc < 0) {
6380       isValidTimeout = 0;
6381       LOGE("Error reading the real time clock!!");
6382     }
6383     else {
6384       // Make timeout as 5 sec for request to be honored
6385       int64_t timeout = 5;
6386       {
6387           Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6388           // If there is a pending HDR+ request, the following requests may be blocked until the
6389           // HDR+ request is done. So allow a longer timeout.
6390           if (mHdrPlusPendingRequests.size() > 0) {
6391               timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6392           }
6393       }
6394       ts.tv_sec += timeout;
6395     }
6396     //Block on conditional variable
6397     while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
6398             (mState != ERROR) && (mState != DEINIT)) {
6399         if (!isValidTimeout) {
6400             LOGD("Blocking on conditional wait");
6401             pthread_cond_wait(&mRequestCond, &mMutex);
6402         }
6403         else {
6404             LOGD("Blocking on timed conditional wait");
6405             rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6406             if (rc == ETIMEDOUT) {
6407                 rc = -ENODEV;
6408                 LOGE("Unblocked on timeout!!!!");
6409                 break;
6410             }
6411         }
6412         LOGD("Unblocked");
6413         if (mWokenUpByDaemon) {
6414             mWokenUpByDaemon = false;
6415             if (mPendingLiveRequest < mMaxInFlightRequests)
6416                 break;
6417         }
6418     }
6419     pthread_mutex_unlock(&mMutex);
6420 
6421     return rc;
6422 }
6423 
/*===========================================================================
 * FUNCTION   : startChannelLocked
 *
 * DESCRIPTION: Performs the stream-on sequence: starts the backend channel
 *              with sensor streaming deferred, starts Easel MIPI when an
 *              Easel manager client is open, then starts sensor streaming.
 *              Callers invoke this while holding mMutex.
 *
 * PARAMETERS : None
 *
 * RETURN     : 0 on success, error code on failure
 *==========================================================================*/
int32_t QCamera3HardwareInterface::startChannelLocked()
{
    // Configure modules for stream on. Sensor streaming is deliberately
    // deferred so MIPI can be brought up first (see below).
    int32_t rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
            mChannelHandle, /*start_sensor_streaming*/false);
    if (rc != NO_ERROR) {
        LOGE("start_channel failed %d", rc);
        return rc;
    }

    {
        // Configure Easel for stream on.
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (EaselManagerClientOpened) {
            // Now that sensor mode should have been selected, get the selected sensor mode
            // info.
            memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
            rc = getCurrentSensorModeInfo(mSensorModeInfo);
            if (rc != NO_ERROR) {
                ALOGE("%s: Get current sensor mode failed, bail out: %s (%d).", __FUNCTION__,
                        strerror(-rc), rc);
                return rc;
            }
            logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
            // Bring up MIPI at the selected mode's pixel clock before the
            // sensor starts streaming below.
            rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
                    /*enableCapture*/true);
            if (rc != OK) {
                ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
                        mCameraId, mSensorModeInfo.op_pixel_clk);
                return rc;
            }
            logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
            // Remembered so stopChannelLocked() knows it must stop MIPI.
            mEaselMipiStarted = true;
        }
    }

    // Start sensor streaming.
    rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
            mChannelHandle);
    if (rc != NO_ERROR) {
        LOGE("start_sensor_stream_on failed %d", rc);
        return rc;
    }

    return 0;
}
6470 
stopChannelLocked(bool stopChannelImmediately)6471 void QCamera3HardwareInterface::stopChannelLocked(bool stopChannelImmediately)
6472 {
6473     mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6474             mChannelHandle, stopChannelImmediately);
6475 
6476     {
6477         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6478         if (EaselManagerClientOpened && mEaselMipiStarted) {
6479             int32_t rc = gEaselManagerClient->stopMipi(mCameraId);
6480             if (rc != 0) {
6481                 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
6482             }
6483             mEaselMipiStarted = false;
6484         }
6485     }
6486 }
6487 
/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION: Writes HAL3 debug state (pending requests, pending buffers,
 *              pending frame drops) as formatted text to the supplied file
 *              descriptor, for use by dumpsys.
 *
 * PARAMETERS :
 *   @fd : file descriptor to write the textual dump to
 *
 * RETURN     : None
 *==========================================================================*/
dump(int fd)6498 void QCamera3HardwareInterface::dump(int fd)
6499 {
6500     pthread_mutex_lock(&mMutex);
6501     dprintf(fd, "\n Camera HAL3 information Begin \n");
6502 
6503     dprintf(fd, "\nNumber of pending requests: %zu \n",
6504         mPendingRequestsList.size());
6505     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6506     dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
6507     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6508     for(pendingRequestIterator i = mPendingRequestsList.begin();
6509             i != mPendingRequestsList.end(); i++) {
6510         dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6511         i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6512         i->input_buffer);
6513     }
6514     dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6515                 mPendingBuffersMap.get_num_overall_buffers());
6516     dprintf(fd, "-------+------------------\n");
6517     dprintf(fd, " Frame | Stream type mask \n");
6518     dprintf(fd, "-------+------------------\n");
6519     for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6520         for(auto &j : req.mPendingBufferList) {
6521             QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6522             dprintf(fd, " %5d | %11d \n",
6523                     req.frame_number, channel->getStreamTypeMask());
6524         }
6525     }
6526     dprintf(fd, "-------+------------------\n");
6527 
6528     dprintf(fd, "\nPending frame drop list: %zu\n",
6529         mPendingFrameDropList.size());
6530     dprintf(fd, "-------+-----------\n");
6531     dprintf(fd, " Frame | Stream ID \n");
6532     dprintf(fd, "-------+-----------\n");
6533     for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6534         i != mPendingFrameDropList.end(); i++) {
6535         dprintf(fd, " %5d | %9d \n",
6536             i->frame_number, i->stream_ID);
6537     }
6538     dprintf(fd, "-------+-----------\n");
6539 
6540     dprintf(fd, "\n Camera HAL3 information End \n");
6541 
6542     /* use dumpsys media.camera as trigger to send update debug level event */
6543     mUpdateDebugLevel = true;
6544     pthread_mutex_unlock(&mMutex);
6545     return;
6546 }
6547 
6548 /*===========================================================================
6549  * FUNCTION   : flush
6550  *
6551  * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6552  *              conditionally restarts channels
6553  *
6554  * PARAMETERS :
6555  *  @ restartChannels: re-start all channels
6556  *  @ stopChannelImmediately: stop the channel immediately. This should be used
6557  *                            when device encountered an error and MIPI may has
6558  *                            been stopped.
6559  *
6560  * RETURN     :
6561  *          0 on success
6562  *          Error code on failure
6563  *==========================================================================*/
flush(bool restartChannels,bool stopChannelImmediately)6564 int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
6565 {
6566     KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
6567     int32_t rc = NO_ERROR;
6568 
6569     LOGD("Unblocking Process Capture Request");
6570     pthread_mutex_lock(&mMutex);
6571     mFlush = true;
6572     pthread_mutex_unlock(&mMutex);
6573 
6574     // Disable HDR+ if it's enabled;
6575     {
6576         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6577         finishHdrPlusClientOpeningLocked(l);
6578         disableHdrPlusModeLocked();
6579     }
6580 
6581     rc = stopAllChannels();
6582     // unlink of dualcam
6583     if (mIsDeviceLinked) {
6584         cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6585                 &m_pDualCamCmdPtr->bundle_info;
6586         m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
6587         m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6588         pthread_mutex_lock(&gCamLock);
6589 
6590         if (mIsMainCamera == 1) {
6591             m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6592             m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
6593             m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
6594             // related session id should be session id of linked session
6595             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6596         } else {
6597             m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6598             m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
6599             m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
6600             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6601         }
6602         m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
6603         pthread_mutex_unlock(&gCamLock);
6604 
6605         rc = mCameraHandle->ops->set_dual_cam_cmd(
6606                 mCameraHandle->camera_handle);
6607         if (rc < 0) {
6608             LOGE("Dualcam: Unlink failed, but still proceed to close");
6609         }
6610     }
6611 
6612     if (rc < 0) {
6613         LOGE("stopAllChannels failed");
6614         return rc;
6615     }
6616     if (mChannelHandle) {
6617         stopChannelLocked(stopChannelImmediately);
6618     }
6619 
6620     // Reset bundle info
6621     rc = setBundleInfo();
6622     if (rc < 0) {
6623         LOGE("setBundleInfo failed %d", rc);
6624         return rc;
6625     }
6626 
6627     // Mutex Lock
6628     pthread_mutex_lock(&mMutex);
6629 
6630     // Unblock process_capture_request
6631     mPendingLiveRequest = 0;
6632     pthread_cond_signal(&mRequestCond);
6633 
6634     rc = notifyErrorForPendingRequests();
6635     if (rc < 0) {
6636         LOGE("notifyErrorForPendingRequests failed");
6637         pthread_mutex_unlock(&mMutex);
6638         return rc;
6639     }
6640 
6641     mFlush = false;
6642 
6643     // Start the Streams/Channels
6644     if (restartChannels) {
6645         rc = startAllChannels();
6646         if (rc < 0) {
6647             LOGE("startAllChannels failed");
6648             pthread_mutex_unlock(&mMutex);
6649             return rc;
6650         }
6651         if (mChannelHandle) {
6652             // Configure modules for stream on.
6653             rc = startChannelLocked();
6654             if (rc < 0) {
6655                 LOGE("startChannelLocked failed");
6656                 pthread_mutex_unlock(&mMutex);
6657                 return rc;
6658             }
6659         }
6660         mFirstPreviewIntentSeen = false;
6661     }
6662     pthread_mutex_unlock(&mMutex);
6663 
6664     return 0;
6665 }
6666 
6667 /*===========================================================================
6668  * FUNCTION   : flushPerf
6669  *
6670  * DESCRIPTION: This is the performance optimization version of flush that does
6671  *              not use stream off, rather flushes the system
6672  *
6673  * PARAMETERS :
6674  *
6675  *
6676  * RETURN     : 0 : success
6677  *              -EINVAL: input is malformed (device is not valid)
6678  *              -ENODEV: if the device has encountered a serious error
6679  *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    // Snapshot the number of buffers the HAL currently owes the framework;
    // the flush is considered complete once all of them have been returned.
    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    mPendingBuffersMap.numPendingBufsAtFlush =
        mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
        mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    // Nothing outstanding -- flush is trivially complete.
    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
    if (rc < 0) {
        // Fall back to an untimed wait if the clock cannot be read.
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    // mBuffersCond is signalled as buffers are returned; the mutex is
    // released while waiting, so numPendingBufsAtFlush can make progress.
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                 LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                 break;
            }
        } else {
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                            strerror(rc));
                break;
            }
        }
    }
    // Any wait failure (including timeout) is reported as a device error.
    if (rc != 0) {
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
               LOGE("Flushing the channels failed with error %d", rc);
               // even though the channel flush failed we need to continue and
               // return the buffers we have to the framework, however the return
               // value will be an error
               rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
6780 
6781 /*===========================================================================
6782  * FUNCTION   : handleCameraDeviceError
6783  *
6784  * DESCRIPTION: This function calls internal flush and notifies the error to
6785  *              framework and updates the state variable.
6786  *
6787  * PARAMETERS :
6788  *   @stopChannelImmediately : stop channels immediately without waiting for
6789  *                             frame boundary.
6790  *
6791  * RETURN     : NO_ERROR on Success
6792  *              Error code on failure
6793  *==========================================================================*/
int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
{
    int32_t rc = NO_ERROR;

    {
        // mFlushLock serializes error handling against other flush callers.
        Mutex::Autolock lock(mFlushLock);
        pthread_mutex_lock(&mMutex);
        if (mState != ERROR) {
            //if mState != ERROR, nothing to be done
            pthread_mutex_unlock(&mMutex);
            return NO_ERROR;
        }
        // mMutex must be dropped before flush(), which acquires it itself.
        pthread_mutex_unlock(&mMutex);

        rc = flush(false /* restart channels */, stopChannelImmediately);
        if (NO_ERROR != rc) {
            LOGE("internal flush to handle mState = ERROR failed");
        }

        pthread_mutex_lock(&mMutex);
        mState = DEINIT;
        pthread_mutex_unlock(&mMutex);
    }

    // Report a fatal device error to the framework; frame_number 0 and a
    // NULL stream mark it as a device-wide (not per-request) error.
    camera3_notify_msg_t notify_msg;
    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
    notify_msg.type = CAMERA3_MSG_ERROR;
    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
    notify_msg.message.error.error_stream = NULL;
    notify_msg.message.error.frame_number = 0;
    orchestrateNotify(&notify_msg);

    return rc;
}
6828 
6829 /*===========================================================================
6830  * FUNCTION   : captureResultCb
6831  *
6832  * DESCRIPTION: Callback handler for all capture result
6833  *              (streams, as well as metadata)
6834  *
6835  * PARAMETERS :
6836  *   @metadata : metadata information
6837  *   @buffer   : actual gralloc buffer to be returned to frameworks.
6838  *               NULL if metadata.
6839  *
6840  * RETURN     : NONE
6841  *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata_buf,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer)6842 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6843                 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6844 {
6845     if (metadata_buf) {
6846         pthread_mutex_lock(&mMutex);
6847         uint8_t batchSize = mBatchSize;
6848         pthread_mutex_unlock(&mMutex);
6849         if (batchSize) {
6850             handleBatchMetadata(metadata_buf,
6851                     true /* free_and_bufdone_meta_buf */);
6852         } else { /* mBatchSize = 0 */
6853             hdrPlusPerfLock(metadata_buf);
6854             pthread_mutex_lock(&mMutex);
6855             handleMetadataWithLock(metadata_buf,
6856                     true /* free_and_bufdone_meta_buf */,
6857                     true /* last urgent frame of batch metadata */,
6858                     true /* last frame of batch metadata */,
6859                     NULL);
6860             pthread_mutex_unlock(&mMutex);
6861         }
6862     } else if (isInputBuffer) {
6863         pthread_mutex_lock(&mMutex);
6864         handleInputBufferWithLock(frame_number);
6865         pthread_mutex_unlock(&mMutex);
6866     } else {
6867         pthread_mutex_lock(&mMutex);
6868         handleBufferWithLock(buffer, frame_number);
6869         pthread_mutex_unlock(&mMutex);
6870     }
6871     return;
6872 }
6873 
6874 /*===========================================================================
6875  * FUNCTION   : getReprocessibleOutputStreamId
6876  *
6877  * DESCRIPTION: Get source output stream id for the input reprocess stream
6878  *              based on size and format, which would be the largest
6879  *              output stream if an input stream exists.
6880  *
6881  * PARAMETERS :
6882  *   @id      : return the stream id if found
6883  *
6884  * RETURN     : int32_t type of status
6885  *              NO_ERROR  -- success
6886  *              none-zero failure code
6887  *==========================================================================*/
getReprocessibleOutputStreamId(uint32_t & id)6888 int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6889 {
6890     /* check if any output or bidirectional stream with the same size and format
6891        and return that stream */
6892     if ((mInputStreamInfo.dim.width > 0) &&
6893             (mInputStreamInfo.dim.height > 0)) {
6894         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6895                 it != mStreamInfo.end(); it++) {
6896 
6897             camera3_stream_t *stream = (*it)->stream;
6898             if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6899                     (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6900                     (stream->format == mInputStreamInfo.format)) {
6901                 // Usage flag for an input stream and the source output stream
6902                 // may be different.
6903                 LOGD("Found reprocessible output stream! %p", *it);
6904                 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6905                          stream->usage, mInputStreamInfo.usage);
6906 
6907                 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6908                 if (channel != NULL && channel->mStreams[0]) {
6909                     id = channel->mStreams[0]->getMyServerID();
6910                     return NO_ERROR;
6911                 }
6912             }
6913         }
6914     } else {
6915         LOGD("No input stream, so no reprocessible output stream");
6916     }
6917     return NAME_NOT_FOUND;
6918 }
6919 
6920 /*===========================================================================
6921  * FUNCTION   : lookupFwkName
6922  *
6923  * DESCRIPTION: In case the enum is not same in fwk and backend
6924  *              make sure the parameter is correctly propogated
6925  *
6926  * PARAMETERS  :
6927  *   @arr      : map between the two enums
6928  *   @len      : len of the map
6929  *   @hal_name : name of the hal_parm to map
6930  *
6931  * RETURN     : int type of status
6932  *              fwk_name  -- success
6933  *              none-zero failure code
6934  *==========================================================================*/
lookupFwkName(const mapType * arr,size_t len,halType hal_name)6935 template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6936         size_t len, halType hal_name)
6937 {
6938 
6939     for (size_t i = 0; i < len; i++) {
6940         if (arr[i].hal_name == hal_name) {
6941             return arr[i].fwk_name;
6942         }
6943     }
6944 
6945     /* Not able to find matching framework type is not necessarily
6946      * an error case. This happens when mm-camera supports more attributes
6947      * than the frameworks do */
6948     LOGH("Cannot find matching framework type");
6949     return NAME_NOT_FOUND;
6950 }
6951 
6952 /*===========================================================================
6953  * FUNCTION   : lookupHalName
6954  *
6955  * DESCRIPTION: In case the enum is not same in fwk and backend
6956  *              make sure the parameter is correctly propogated
6957  *
6958  * PARAMETERS  :
6959  *   @arr      : map between the two enums
6960  *   @len      : len of the map
6961  *   @fwk_name : name of the hal_parm to map
6962  *
6963  * RETURN     : int32_t type of status
6964  *              hal_name  -- success
6965  *              none-zero failure code
6966  *==========================================================================*/
lookupHalName(const mapType * arr,size_t len,fwkType fwk_name)6967 template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6968         size_t len, fwkType fwk_name)
6969 {
6970     for (size_t i = 0; i < len; i++) {
6971         if (arr[i].fwk_name == fwk_name) {
6972             return arr[i].hal_name;
6973         }
6974     }
6975 
6976     LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6977     return NAME_NOT_FOUND;
6978 }
6979 
6980 /*===========================================================================
6981  * FUNCTION   : lookupProp
6982  *
6983  * DESCRIPTION: lookup a value by its name
6984  *
6985  * PARAMETERS :
6986  *   @arr     : map between the two enums
6987  *   @len     : size of the map
6988  *   @name    : name to be looked up
6989  *
6990  * RETURN     : Value if found
6991  *              CAM_CDS_MODE_MAX if not found
6992  *==========================================================================*/
lookupProp(const mapType * arr,size_t len,const char * name)6993 template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6994         size_t len, const char *name)
6995 {
6996     if (name) {
6997         for (size_t i = 0; i < len; i++) {
6998             if (!strcmp(arr[i].desc, name)) {
6999                 return arr[i].val;
7000             }
7001         }
7002     }
7003     return CAM_CDS_MODE_MAX;
7004 }
7005 
7006 /*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate metadata received from the HAL backend into a
 *              framework camera_metadata_t for the given pending request.
 *
7010  * PARAMETERS :
7011  *   @metadata : metadata information from callback
7012  *   @pendingRequest: pending request for this metadata
7013  *   @pprocDone: whether internal offline postprocsesing is done
7014  *   @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
7015  *                         in a batch. Always true for non-batch mode.
7016  *
7017  * RETURN     : camera_metadata_t*
7018  *              metadata in a format specified by fwk
7019  *==========================================================================*/
7020 camera_metadata_t*
translateFromHalMetadata(metadata_buffer_t * metadata,const PendingRequestInfo & pendingRequest,bool pprocDone,bool lastMetadataInBatch,const bool * enableZsl)7021 QCamera3HardwareInterface::translateFromHalMetadata(
7022                                  metadata_buffer_t *metadata,
7023                                  const PendingRequestInfo& pendingRequest,
7024                                  bool pprocDone,
7025                                  bool lastMetadataInBatch,
7026                                  const bool *enableZsl)
7027 {
7028     CameraMetadata camMetadata;
7029     camera_metadata_t *resultMetadata;
7030 
7031     if (!lastMetadataInBatch) {
7032         /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
7033          * Timestamp is needed because it's used for shutter notify calculation.
7034          * */
7035         camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
7036         resultMetadata = camMetadata.release();
7037         return resultMetadata;
7038     }
7039 
7040     if (pendingRequest.jpegMetadata.entryCount())
7041         camMetadata.append(pendingRequest.jpegMetadata);
7042 
7043     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
7044     camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
7045     camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
7046     camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
7047     camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
7048     camMetadata.update(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE, &pendingRequest.motion_detection_enable, 1);
7049     if (mBatchSize == 0) {
7050         // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
7051         camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
7052     }
7053 
7054     // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
7055     // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
7056     if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
7057         // DevCamDebug metadata translateFromHalMetadata AF
7058         IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
7059                 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
7060             int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
7061             camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
7062         }
7063         IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
7064                 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
7065             int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
7066             camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
7067         }
7068         IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
7069                 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
7070             int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
7071             camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
7072         }
7073         IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
7074                 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
7075             int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
7076             camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
7077         }
7078         IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
7079                 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
7080             int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
7081             camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
7082         }
7083         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
7084                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
7085             int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
7086                 *DevCamDebug_af_monitor_pdaf_target_pos;
7087             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
7088                 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
7089         }
7090         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
7091                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
7092             int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
7093                 *DevCamDebug_af_monitor_pdaf_confidence;
7094             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
7095                 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
7096         }
7097         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
7098                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
7099             int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
7100             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
7101                 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
7102         }
7103         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
7104                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
7105             int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
7106                 *DevCamDebug_af_monitor_tof_target_pos;
7107             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
7108                 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
7109         }
7110         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
7111                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
7112             int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
7113                 *DevCamDebug_af_monitor_tof_confidence;
7114             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
7115                 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
7116         }
7117         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
7118                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
7119             int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
7120             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
7121                 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
7122         }
7123         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
7124                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
7125             int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
7126             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
7127                 &fwk_DevCamDebug_af_monitor_type_select, 1);
7128         }
7129         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
7130                 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
7131             int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
7132             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
7133                 &fwk_DevCamDebug_af_monitor_refocus, 1);
7134         }
7135         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
7136                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
7137             int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
7138             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
7139                 &fwk_DevCamDebug_af_monitor_target_pos, 1);
7140         }
7141         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
7142                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
7143             int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
7144                 *DevCamDebug_af_search_pdaf_target_pos;
7145             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
7146                 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
7147         }
7148         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
7149                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
7150             int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
7151             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
7152                 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
7153         }
7154         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
7155                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
7156             int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
7157             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
7158                 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
7159         }
7160         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
7161                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
7162             int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
7163             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
7164                 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
7165         }
7166         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
7167                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
7168             int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
7169             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
7170                 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
7171         }
7172         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
7173                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
7174             int32_t fwk_DevCamDebug_af_search_tof_target_pos =
7175                 *DevCamDebug_af_search_tof_target_pos;
7176             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
7177                 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
7178         }
7179         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
7180                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
7181             int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
7182             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
7183                 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
7184         }
7185         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
7186                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
7187             int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
7188             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
7189                 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
7190         }
7191         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
7192                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
7193             int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
7194             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
7195                 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
7196         }
7197         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
7198                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
7199             int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
7200             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
7201                 &fwk_DevCamDebug_af_search_tof_confidence, 1);
7202         }
7203         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
7204                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
7205             int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
7206             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
7207                 &fwk_DevCamDebug_af_search_type_select, 1);
7208         }
7209         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
7210                 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
7211             int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
7212             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
7213                 &fwk_DevCamDebug_af_search_next_pos, 1);
7214         }
7215         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
7216                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
7217             int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
7218             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
7219                 &fwk_DevCamDebug_af_search_target_pos, 1);
7220         }
7221         // DevCamDebug metadata translateFromHalMetadata AEC
7222         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
7223                 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
7224             int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
7225             camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
7226     }
7227         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
7228                 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
7229             int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
7230             camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
7231         }
7232         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
7233                 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
7234             int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
7235             camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
7236         }
7237         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
7238                 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
7239             int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
7240             camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
7241         }
7242         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
7243                 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
7244             int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
7245             camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
7246         }
7247         IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
7248                 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
7249             float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
7250             camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
7251         }
7252         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
7253                 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
7254             int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
7255             camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
7256         }
7257         IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
7258                 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
7259             float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
7260             camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
7261         }
7262         // DevCamDebug metadata translateFromHalMetadata zzHDR
7263         IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
7264                 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
7265             float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
7266             camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
7267         }
7268         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
7269                 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
7270             int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
7271             camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
7272         }
7273         IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
7274                 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
7275             float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
7276             camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
7277         }
7278         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
7279                 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
7280             int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
7281             camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
7282         }
7283         IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
7284                 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
7285             float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
7286                 *DevCamDebug_aec_hdr_sensitivity_ratio;
7287             camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
7288                                &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
7289         }
7290         IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
7291                 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
7292             float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
7293             camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
7294                                &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
7295         }
7296         // DevCamDebug metadata translateFromHalMetadata ADRC
7297         IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
7298                 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
7299             float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
7300             camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
7301                                &fwk_DevCamDebug_aec_total_drc_gain, 1);
7302         }
7303         IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
7304                 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
7305             float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
7306             camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
7307                                &fwk_DevCamDebug_aec_color_drc_gain, 1);
7308         }
7309         IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
7310                 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
7311             float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
7312             camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
7313         }
7314         IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
7315                 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
7316             float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
7317             camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
7318         }
7319         IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
7320                 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
7321             float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
7322             camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
7323         }
7324         IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
7325                 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
7326             float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
7327             camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
7328         }
7329         // DevCamDebug metadata translateFromHalMetadata AEC MOTION
7330         IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
7331                 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
7332             float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
7333             camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
7334                                &fwk_DevCamDebug_aec_camera_motion_dx, 1);
7335         }
7336         IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
7337                 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
            // Copy into a local so camMetadata.update() gets an addressable lvalue.
            float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
            camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
                               &fwk_DevCamDebug_aec_camera_motion_dy, 1);
        }
        // AEC subject-motion debug value, forwarded verbatim to the framework tag.
        IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
                CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
            float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
            camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
                               &fwk_DevCamDebug_aec_subject_motion, 1);
        }
        // DevCamDebug metadata translateFromHalMetadata AWB
        // Per-channel AWB gains (R/G/B) plus the color temperature and AWB
        // decision, each published under its DevCamDebug vendor tag when the
        // corresponding HAL entry is present.
        IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
                CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
            float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
            camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
                CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
            float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
            camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
                CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
            float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
            camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
        }
        // AWB correlated color temperature (CCT), in Kelvin presumably — TODO confirm units.
        IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
                CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
            int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
            camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
                CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
            int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
            camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
        }
    }
7375     // atrace_end(ATRACE_TAG_ALWAYS);
7376 
    // HAL frame number -> ANDROID_SYNC_FRAME_NUMBER (framework expects int64).
    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
        int64_t fwk_frame_number = *frame_number;
        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
    }

    // Applied AE FPS range: HAL reports floats, framework tag takes an
    // int32 [min, max] pair, so the fractional part is truncated here.
    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
        int32_t fps_range[2];
        fps_range[0] = (int32_t)float_range->min_fps;
        fps_range[1] = (int32_t)float_range->max_fps;
        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
                                      fps_range, 2);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
             fps_range[0], fps_range[1]);
    }

    // AE exposure compensation index, passed through unchanged.
    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
    }
7395 
7396     IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7397         int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7398                 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7399                 *sceneMode);
7400         if (NAME_NOT_FOUND != val) {
7401             uint8_t fwkSceneMode = (uint8_t)val;
7402             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7403             LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7404                      fwkSceneMode);
7405         }
7406     }
7407 
    // AE lock state (HAL uint32 -> framework uint8 enum).
    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
    }

    // AWB lock state.
    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
    }

    // Color correction mode.
    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
    }

    // Edge (sharpening) mode — only the mode field of the HAL struct is
    // forwarded; strength stays HAL-internal.
    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
            CAM_INTF_META_EDGE_MODE, metadata) {
        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
    }

    // Flash firing power and firing time, passed through.
    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
        uint8_t fwk_flashPower = (uint8_t) *flashPower;
        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
    }

    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
    }

    // Flash state: negative HAL values are treated as "no report"; on a
    // flashless unit the state is forced to UNAVAILABLE regardless of
    // what the HAL said.
    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
        if (0 <= *flashState) {
            uint8_t fwk_flashState = (uint8_t) *flashState;
            if (!gCamCapability[mCameraId]->flash_available) {
                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
            }
            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
        }
    }

    // Flash mode via FLASH_MODES_MAP; silently dropped when unmapped.
    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_flashMode = (uint8_t)val;
            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
        }
    }
7454 
    // Hot pixel correction mode.
    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
    }

    // Lens aperture / filter density / focal length, forwarded as-is.
    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
    }

    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
    }

    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
    }

    // Optical image stabilization (OIS) mode.
    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
    }

    // Electronic (video) stabilization mode. Note the unusual else branch:
    // the EIS result must always be present in the output metadata, so when
    // the HAL omits it we publish OFF rather than leaving the tag absent.
    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
        uint8_t fwk_videoStab = (uint8_t) *videoStab;
        LOGD("fwk_videoStab = %d", fwk_videoStab);
        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
    } else {
        // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
        // and so hardcoding the Video Stab result to OFF mode.
        uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
        LOGD("EIS result default to OFF mode");
    }
7488 
    // Noise reduction mode.
    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
    }

    // Reprocess effective exposure factor.
    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
    }

    // Per-frame applied black level. The HAL reports the pattern in RGGB
    // order; adjustBlackLevelForCFA() reorders it to match this sensor's
    // actual color filter arrangement before publishing.
    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
        float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];

        adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
              gCamCapability[mCameraId]->color_arrangement);

        LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
          blackLevelAppliedPattern->cam_black_level[0],
          blackLevelAppliedPattern->cam_black_level[1],
          blackLevelAppliedPattern->cam_black_level[2],
          blackLevelAppliedPattern->cam_black_level[3]);
        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
                BLACK_LEVEL_PATTERN_CNT);

#ifndef USE_HAL_3_3
        // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
        // Need convert the internal 14 bit depth to sensor 10 bit sensor raw
        // depth space.
        // (14-bit -> 10-bit is a 4-bit shift, hence the divide by 16.)
        fwk_blackLevelInd[0] /= 16.0;
        fwk_blackLevelInd[1] /= 16.0;
        fwk_blackLevelInd[2] /= 16.0;
        fwk_blackLevelInd[3] /= 16.0;
        camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
                BLACK_LEVEL_PATTERN_CNT);
#endif
    }

#ifndef USE_HAL_3_3
    // Fixed whitelevel is used by ISP/Sensor
    camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
            &gCamCapability[mCameraId]->white_level, 1);
#endif
7531 
    // Cache the latest EIS crop info (used elsewhere in the HAL) and remap
    // its deltas from sensor-output coordinates to active-array coordinates.
    IF_META_AVAILABLE(cam_eis_crop_info_t, eisCropInfo,
            CAM_INTF_META_EIS_CROP_INFO, metadata) {
        mLastEISCropInfo = *eisCropInfo;

        mCropRegionMapper.toActiveArray(mLastEISCropInfo.delta_x, mLastEISCropInfo.delta_y,
                mLastEISCropInfo.delta_width, mLastEISCropInfo.delta_height);
    }

    // Scaler crop region, converted to the [left, top, width, height] array
    // layout the framework tag expects.
    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
        int32_t scalerCropRegion[4];
        scalerCropRegion[0] = hScalerCropRegion->left;
        scalerCropRegion[1] = hScalerCropRegion->top;
        scalerCropRegion[2] = hScalerCropRegion->width;
        scalerCropRegion[3] = hScalerCropRegion->height;

        // Adjust crop region from sensor output coordinate system to active
        // array coordinate system.
        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
                scalerCropRegion[2], scalerCropRegion[3]);

        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
    }
7555 
    // Sensor exposure time (ns presumably — matches the int64 framework tag).
    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
        LOGD("sensorExpTime = %lld", *sensorExpTime);
        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
    }

    // Vendor exposure-time boost factor.
    IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
        LOGD("expTimeBoost = %f", *expTimeBoost);
        camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
    }

    // Sensor frame duration. ("Fame" is a long-standing typo in the HAL
    // identifier, kept for consistency with the rest of the codebase.)
    IF_META_AVAILABLE(int64_t, sensorFameDuration,
            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
        LOGD("sensorFameDuration = %lld", *sensorFameDuration);
        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
    }

    // Rolling shutter skew.
    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
        LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
                sensorRollingShutterSkew, 1);
    }

    // Sensor sensitivity (ISO), plus the derived per-channel noise model.
    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
        LOGD("sensorSensitivity = %d", *sensorSensitivity);
        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

        //calculate the noise profile based on sensitivity
        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
        // One (S, O) pair per color channel, interleaved [S0, O0, S1, O1, ...].
        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
            noise_profile[i]   = noise_profile_S;
            noise_profile[i+1] = noise_profile_O;
        }
        LOGD("noise model entry (S, O) is (%f, %f)",
                noise_profile_S, noise_profile_O);
        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
    }
7596 
#ifndef USE_HAL_3_3
    // Post-RAW sensitivity boost: starts at 100 (unity, in 1/100 units),
    // overridden by the ISP sensitivity if reported, then scaled by the
    // post-stats sensitivity factor if that is also reported. Always
    // published, even when neither HAL entry is present.
    int32_t fwk_ispSensitivity = 100;
    IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
        fwk_ispSensitivity = (int32_t) *ispSensitivity;
    }
    IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
        fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
    }
    camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
#endif

    // Lens shading (vignetting correction) mode.
    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
    }
7612 
    // Face detection translation. Publishes the FD mode, and — when FD is on —
    // face scores/rectangles for SIMPLE mode, plus IDs/landmarks for FULL mode,
    // and the vendor blink/smile/gaze statistics. Face coordinates are remapped
    // from sensor-output space to active-array space before publishing.
    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
                *faceDetectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_faceDetectMode = (uint8_t)val;
            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);

            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
                        CAM_INTF_META_FACE_DETECTION, metadata) {
                    // Clamp the face count to what the fixed-size arrays can hold.
                    uint8_t numFaces = MIN(
                            faceDetectionInfo->num_faces_detected, MAX_ROI);
                    int32_t faceIds[MAX_ROI];
                    uint8_t faceScores[MAX_ROI];
                    int32_t faceRectangles[MAX_ROI * 4];
                    int32_t faceLandmarks[MAX_ROI * 6];
                    // j walks faceRectangles (4 ints/face); k walks
                    // faceLandmarks (TOTAL_LANDMARK_INDICES ints/face).
                    size_t j = 0, k = 0;

                    for (size_t i = 0; i < numFaces; i++) {
                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
                        // Adjust crop region from sensor output coordinate system to active
                        // array coordinate system.
                        cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
                                rect.width, rect.height);

                        convertToRegions(rect, faceRectangles+j, -1);

                        LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
                                "bottom-right (%d, %d)",
                                faceDetectionInfo->frame_id, i,
                                faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
                                faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);

                        j+= 4;
                    }
                    // numFaces is unsigned, so this effectively means "no faces":
                    // zero the buffers so stale stack data is never published.
                    if (numFaces <= 0) {
                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
                    }

                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
                            numFaces);
                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
                            faceRectangles, numFaces * 4U);
                    if (fwk_faceDetectMode ==
                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                        // FULL mode additionally reports per-face landmarks and IDs.
                        IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
                                CAM_INTF_META_FACE_LANDMARK, metadata) {

                            for (size_t i = 0; i < numFaces; i++) {
                                cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
                                // Map the co-ordinate sensor output coordinate system to active
                                // array coordinate system.
                                mCropRegionMapper.toActiveArray(
                                        face_landmarks.left_eye_center.x,
                                        face_landmarks.left_eye_center.y);
                                mCropRegionMapper.toActiveArray(
                                        face_landmarks.right_eye_center.x,
                                        face_landmarks.right_eye_center.y);
                                mCropRegionMapper.toActiveArray(
                                        face_landmarks.mouth_center.x,
                                        face_landmarks.mouth_center.y);

                                convertLandmarks(face_landmarks, faceLandmarks+k);

                                LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
                                        "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
                                        faceDetectionInfo->frame_id, i,
                                        faceLandmarks[k + LEFT_EYE_X],
                                        faceLandmarks[k + LEFT_EYE_Y],
                                        faceLandmarks[k + RIGHT_EYE_X],
                                        faceLandmarks[k + RIGHT_EYE_Y],
                                        faceLandmarks[k + MOUTH_X],
                                        faceLandmarks[k + MOUTH_Y]);

                                k+= TOTAL_LANDMARK_INDICES;
                            }
                        } else {
                            // No landmark data from the HAL: fill with the
                            // invalid-landmark sentinel for each face.
                            for (size_t i = 0; i < numFaces; i++) {
                                setInvalidLandmarks(faceLandmarks+k);
                                k+= TOTAL_LANDMARK_INDICES;
                            }
                        }

                        for (size_t i = 0; i < numFaces; i++) {
                            faceIds[i] = faceDetectionInfo->faces[i].face_id;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
                                    faceDetectionInfo->frame_id, i, faceIds[i]);
                        }

                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
                                faceLandmarks, numFaces * 6U);
                    }
                    // Vendor tag: per-face blink detection and per-eye blink degree.
                    IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
                            CAM_INTF_META_FACE_BLINK, metadata) {
                        uint8_t detected[MAX_ROI];
                        uint8_t degree[MAX_ROI * 2];
                        for (size_t i = 0; i < numFaces; i++) {
                            detected[i] = blinks->blink[i].blink_detected;
                            degree[2 * i] = blinks->blink[i].left_blink;
                            degree[2 * i + 1] = blinks->blink[i].right_blink;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
                                    "blink_detected=%d, leye_blink=%d, reye_blink=%d",
                                    faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
                                    degree[2 * i + 1]);
                        }
                        camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
                                detected, numFaces);
                        camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
                                degree, numFaces * 2);
                    }
                    // Vendor tag: per-face smile degree and confidence.
                    IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
                            CAM_INTF_META_FACE_SMILE, metadata) {
                        uint8_t degree[MAX_ROI];
                        uint8_t confidence[MAX_ROI];
                        for (size_t i = 0; i < numFaces; i++) {
                            degree[i] = smiles->smile[i].smile_degree;
                            confidence[i] = smiles->smile[i].smile_confidence;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
                                    "smile_degree=%d, smile_score=%d",
                                    faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
                        }
                        camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
                                degree, numFaces);
                        camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
                                confidence, numFaces);
                    }
                    // Vendor tag: per-face gaze angle, direction, and degree.
                    IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
                            CAM_INTF_META_FACE_GAZE, metadata) {
                        int8_t angle[MAX_ROI];
                        int32_t direction[MAX_ROI * 3];
                        int8_t degree[MAX_ROI * 2];
                        for (size_t i = 0; i < numFaces; i++) {
                            angle[i] = gazes->gaze[i].gaze_angle;
                            direction[3 * i] = gazes->gaze[i].updown_dir;
                            direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
                            direction[3 * i + 2] = gazes->gaze[i].roll_dir;
                            degree[2 * i] = gazes->gaze[i].left_right_gaze;
                            degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
                                    "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
                                    "left_right_gaze=%d, top_bottom_gaze=%d",
                                    faceDetectionInfo->frame_id, i, angle[i],
                                    direction[3 * i], direction[3 * i + 1],
                                    direction[3 * i + 2],
                                    degree[2 * i], degree[2 * i + 1]);
                        }
                        camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
                                (uint8_t *)angle, numFaces);
                        camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
                                direction, numFaces * 3);
                        camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
                                (uint8_t *)degree, numFaces * 2);
                    }
                }
            }
        }
    }
7779 
    // Histogram statistics: publish the mode (and bin count if reported), and
    // when the histogram is on with a positive bin count, select the channel
    // buffer to export based on the stats type.
    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
        int32_t histogramBins = 0;
        camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
        camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);

        IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
            histogramBins = *histBins;
            camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
        }

        if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
            IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
                // process histogram statistics info
                int32_t* histogramData = NULL;

                switch (stats_data->type) {
                case CAM_HISTOGRAM_TYPE_BAYER:
                    // For Bayer stats pick the channel the HAL flagged; Y/ALL/R
                    // and anything unrecognized fall back to the R channel.
                    switch (stats_data->bayer_stats.data_type) {
                        case CAM_STATS_CHANNEL_GR:
                          histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
                          break;
                        case CAM_STATS_CHANNEL_GB:
                          histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
                          break;
                        case CAM_STATS_CHANNEL_B:
                          histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
                          break;
                        case CAM_STATS_CHANNEL_Y:
                        case CAM_STATS_CHANNEL_ALL:
                        case CAM_STATS_CHANNEL_R:
                        default:
                          histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
                          break;
                    }
                    break;
                case CAM_HISTOGRAM_TYPE_YUV:
                    histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
                    break;
                }

                // NOTE(review): an unhandled stats_data->type would leave
                // histogramData NULL here — presumably the HAL only emits the
                // two enum values above; confirm against cam_hist_stats_t.
                camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
            }
        }
    }
7825 
    // Sharpness map mode.
    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
    }

    // Full sharpness map (3 values per cell over the max map dimensions).
    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
    }

    // Lens shading map: 4 gain values (one per Bayer channel) per grid cell.
    // Dimensions are clamped to the framework maxima to avoid overrunning
    // the HAL buffer.
    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
                CAM_MAX_SHADING_MAP_HEIGHT);
        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
                CAM_MAX_SHADING_MAP_WIDTH);
        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
                lensShadingMap->lens_shading, 4U * map_width * map_height);
    }
7847 
    // Tonemap mode.
    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
    }

    // RGB tonemap curves. Each point is an (in, out) pair, hence count * 2
    // floats per channel. An oversized point count is clamped (with an error
    // log) rather than overrunning the curves array.
    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
        //Populate CAM_INTF_META_TONEMAP_CURVES
        /* ch0 = G, ch 1 = B, ch 2 = R*/
        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     tonemap->tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }

        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
                        &tonemap->curves[0].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);

        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
                        &tonemap->curves[1].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);

        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
                        &tonemap->curves[2].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);
    }
7875 
    // Applied color correction gains.
    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
                CC_GAIN_MAX);
    }

    // Applied color correction matrix (rational entries, row-major).
    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
                CC_MATRIX_COLS * CC_MATRIX_ROWS);
    }

    // Sensor profile tone curve, clamped like the RGB tonemap above.
    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     toneCurve->tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }
        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
                (float*)toneCurve->curve.tonemap_points,
                toneCurve->tonemap_points_cnt * 2);
    }

    // Predicted (next-frame) color correction gains and transform.
    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
                predColorCorrectionGains->gains, 4);
    }

    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
                CC_MATRIX_ROWS * CC_MATRIX_COLS);
    }

    // OTP-calibrated Gr/Gb split.
    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
    }

    // Black level lock state.
    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
    }

    // Detected scene flicker (e.g. 50/60Hz).
    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
    }
7928 
    // Effect mode via EFFECT_MODES_MAP; skipped when unmapped.
    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                *effectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_effectMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
        }
    }

    // Sensor test pattern: publish the mode (when mapped) and the 4-element
    // pattern data. The framework expects [R, Gr, Gb, B]; the Gr/Gb slots are
    // swapped for GBRG/BGGR color filter arrangements.
    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
        if (NAME_NOT_FOUND != fwk_testPatternMode) {
            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
        }
        int32_t fwk_testPatternData[4];
        fwk_testPatternData[0] = testPatternData->r;
        fwk_testPatternData[3] = testPatternData->b;
        switch (gCamCapability[mCameraId]->color_arrangement) {
        case CAM_FILTER_ARRANGEMENT_RGGB:
        case CAM_FILTER_ARRANGEMENT_GRBG:
            fwk_testPatternData[1] = testPatternData->gr;
            fwk_testPatternData[2] = testPatternData->gb;
            break;
        case CAM_FILTER_ARRANGEMENT_GBRG:
        case CAM_FILTER_ARRANGEMENT_BGGR:
            fwk_testPatternData[2] = testPatternData->gr;
            fwk_testPatternData[1] = testPatternData->gb;
            break;
        default:
            // NOTE(review): elements [1] and [2] stay uninitialized on this
            // path; only hit for unsupported CFAs per the error below.
            LOGE("color arrangement %d is not supported",
                gCamCapability[mCameraId]->color_arrangement);
            break;
        }
        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
    }
7966 
7967     IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7968         camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7969     }
7970 
7971     IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7972         String8 str((const char *)gps_methods);
7973         camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7974     }
7975 
7976     IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7977         camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7978     }
7979 
7980     IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7981         camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7982     }
7983 
7984     IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7985         uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7986         camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7987     }
7988 
7989     IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7990         uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7991         camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7992     }
7993 
7994     IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7995         int32_t fwk_thumb_size[2];
7996         fwk_thumb_size[0] = thumb_size->width;
7997         fwk_thumb_size[1] = thumb_size->height;
7998         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7999     }
8000 
8001     // Skip reprocess metadata if there is no input stream.
8002     if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
8003         IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
8004             camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
8005                     privateData,
8006                     MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
8007         }
8008     }
8009 
8010     IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
8011         camMetadata.update(QCAMERA3_EXPOSURE_METER,
8012                 meteringMode, 1);
8013     }
8014 
8015     IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
8016             CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
8017         LOGD("hdr_scene_data: %d %f\n",
8018                 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
8019         uint8_t isHdr = hdr_scene_data->is_hdr_scene;
8020         float isHdrConfidence = hdr_scene_data->hdr_confidence;
8021         camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
8022                            &isHdr, 1);
8023         camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
8024                            &isHdrConfidence, 1);
8025     }
8026 
8027 
8028 
8029     if (metadata->is_tuning_params_valid) {
8030         uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
8031         uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
8032         metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
8033 
8034 
8035         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
8036                 sizeof(uint32_t));
8037         data += sizeof(uint32_t);
8038 
8039         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
8040                 sizeof(uint32_t));
8041         LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8042         data += sizeof(uint32_t);
8043 
8044         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
8045                 sizeof(uint32_t));
8046         LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8047         data += sizeof(uint32_t);
8048 
8049         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
8050                 sizeof(uint32_t));
8051         LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8052         data += sizeof(uint32_t);
8053 
8054         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
8055                 sizeof(uint32_t));
8056         LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8057         data += sizeof(uint32_t);
8058 
8059         metadata->tuning_params.tuning_mod3_data_size = 0;
8060         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
8061                 sizeof(uint32_t));
8062         LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8063         data += sizeof(uint32_t);
8064 
8065         size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
8066                 TUNING_SENSOR_DATA_MAX);
8067         memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
8068                 count);
8069         data += count;
8070 
8071         count = MIN(metadata->tuning_params.tuning_vfe_data_size,
8072                 TUNING_VFE_DATA_MAX);
8073         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
8074                 count);
8075         data += count;
8076 
8077         count = MIN(metadata->tuning_params.tuning_cpp_data_size,
8078                 TUNING_CPP_DATA_MAX);
8079         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
8080                 count);
8081         data += count;
8082 
8083         count = MIN(metadata->tuning_params.tuning_cac_data_size,
8084                 TUNING_CAC_DATA_MAX);
8085         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
8086                 count);
8087         data += count;
8088 
8089         camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
8090                 (int32_t *)(void *)tuning_meta_data_blob,
8091                 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
8092     }
8093 
8094     IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
8095             CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
8096         camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
8097                 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
8098                 NEUTRAL_COL_POINTS);
8099     }
8100 
8101     IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
8102         uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
8103         camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
8104     }
8105 
8106     IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
8107         int32_t aeRegions[REGIONS_TUPLE_COUNT];
8108         // Adjust crop region from sensor output coordinate system to active
8109         // array coordinate system.
8110         cam_rect_t hAeRect = hAeRegions->rect;
8111         mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
8112                 hAeRect.width, hAeRect.height);
8113 
8114         convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
8115         camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
8116                 REGIONS_TUPLE_COUNT);
8117         LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8118                  aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
8119                 hAeRect.left, hAeRect.top, hAeRect.width,
8120                 hAeRect.height);
8121     }
8122 
8123     if (!pendingRequest.focusStateSent) {
8124         if (pendingRequest.focusStateValid) {
8125             camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
8126             LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
8127         } else {
8128             IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8129                 uint8_t fwk_afState = (uint8_t) *afState;
8130                 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
8131                 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
8132             }
8133         }
8134     }
8135 
8136     IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
8137         camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
8138         mLastFocusDistance = *focusDistance;
8139     } else {
8140         LOGE("Missing LENS_FOCUS_DISTANCE metadata. Use last known distance of %f",
8141                 mLastFocusDistance);
8142         camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , &mLastFocusDistance, 1);
8143     }
8144 
8145     IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
8146         camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
8147     }
8148 
8149     IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
8150         uint8_t fwk_lensState = *lensState;
8151         camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
8152     }
8153 
8154     IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
8155         uint32_t ab_mode = *hal_ab_mode;
8156         if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
8157                 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
8158               ab_mode = CAM_ANTIBANDING_MODE_AUTO;
8159         }
8160         int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
8161                 ab_mode);
8162         if (NAME_NOT_FOUND != val) {
8163             uint8_t fwk_ab_mode = (uint8_t)val;
8164             camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
8165         }
8166     }
8167 
8168     IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
8169         int val = lookupFwkName(SCENE_MODES_MAP,
8170                 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
8171         if (NAME_NOT_FOUND != val) {
8172             uint8_t fwkBestshotMode = (uint8_t)val;
8173             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
8174             LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
8175         } else {
8176             LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
8177         }
8178     }
8179 
8180     IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
8181          uint8_t fwk_mode = (uint8_t) *mode;
8182          camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
8183     }
8184 
8185     /* Constant metadata values to be update*/
8186 
8187     uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8188     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8189 
8190     int32_t hotPixelMap[2];
8191     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
8192 
8193     // CDS
8194     IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
8195         camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
8196     }
8197 
8198     IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
8199         int32_t fwk_hdr;
8200         int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
8201         if(*vhdr == CAM_SENSOR_HDR_OFF) {
8202             fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
8203         } else {
8204             fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
8205         }
8206 
8207         if(fwk_hdr != curr_hdr_state) {
8208            LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
8209            if(fwk_hdr)
8210               mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
8211            else
8212               mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
8213         }
8214         camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
8215     }
8216 
8217     //binning correction
8218     IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
8219             CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
8220         int32_t fwk_bin_mode = (int32_t) *bin_correction;
8221         camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
8222     }
8223 
8224     IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
8225         int32_t fwk_ir = (int32_t) *ir;
8226         int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
8227         int8_t is_ir_on = 0;
8228 
8229         (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
8230         if(is_ir_on != curr_ir_state) {
8231            LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
8232            if(is_ir_on)
8233               mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
8234            else
8235               mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
8236         }
8237         camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
8238     }
8239 
8240     // AEC SPEED
8241     IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
8242         camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
8243     }
8244 
8245     // AWB SPEED
8246     IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
8247         camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
8248     }
8249 
8250     // TNR
8251     IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
8252         uint8_t tnr_enable       = tnr->denoise_enable;
8253         int32_t tnr_process_type = (int32_t)tnr->process_plates;
8254         int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
8255         int8_t is_tnr_on = 0;
8256 
8257         (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
8258         if(is_tnr_on != curr_tnr_state) {
8259            LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
8260            if(is_tnr_on)
8261               mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
8262            else
8263               mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
8264         }
8265 
8266         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
8267         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
8268     }
8269 
8270     // Reprocess crop data
8271     IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
8272         uint8_t cnt = crop_data->num_of_streams;
8273         if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
8274             // mm-qcamera-daemon only posts crop_data for streams
8275             // not linked to pproc. So no valid crop metadata is not
8276             // necessarily an error case.
8277             LOGD("No valid crop metadata entries");
8278         } else {
8279             uint32_t reproc_stream_id;
8280             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8281                 LOGD("No reprocessible stream found, ignore crop data");
8282             } else {
8283                 int rc = NO_ERROR;
8284                 Vector<int32_t> roi_map;
8285                 int32_t *crop = new int32_t[cnt*4];
8286                 if (NULL == crop) {
8287                    rc = NO_MEMORY;
8288                 }
8289                 if (NO_ERROR == rc) {
8290                     int32_t streams_found = 0;
8291                     for (size_t i = 0; i < cnt; i++) {
8292                         if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
8293                             if (pprocDone) {
8294                                 // HAL already does internal reprocessing,
8295                                 // either via reprocessing before JPEG encoding,
8296                                 // or offline postprocessing for pproc bypass case.
8297                                 crop[0] = 0;
8298                                 crop[1] = 0;
8299                                 crop[2] = mInputStreamInfo.dim.width;
8300                                 crop[3] = mInputStreamInfo.dim.height;
8301                             } else {
8302                                 crop[0] = crop_data->crop_info[i].crop.left;
8303                                 crop[1] = crop_data->crop_info[i].crop.top;
8304                                 crop[2] = crop_data->crop_info[i].crop.width;
8305                                 crop[3] = crop_data->crop_info[i].crop.height;
8306                             }
8307                             roi_map.add(crop_data->crop_info[i].roi_map.left);
8308                             roi_map.add(crop_data->crop_info[i].roi_map.top);
8309                             roi_map.add(crop_data->crop_info[i].roi_map.width);
8310                             roi_map.add(crop_data->crop_info[i].roi_map.height);
8311                             streams_found++;
8312                             LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
8313                                     crop[0], crop[1], crop[2], crop[3]);
8314                             LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
8315                                     crop_data->crop_info[i].roi_map.left,
8316                                     crop_data->crop_info[i].roi_map.top,
8317                                     crop_data->crop_info[i].roi_map.width,
8318                                     crop_data->crop_info[i].roi_map.height);
8319                             break;
8320 
8321                        }
8322                     }
8323                     camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
8324                             &streams_found, 1);
8325                     camMetadata.update(QCAMERA3_CROP_REPROCESS,
8326                             crop, (size_t)(streams_found * 4));
8327                     if (roi_map.array()) {
8328                         camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
8329                                 roi_map.array(), roi_map.size());
8330                     }
8331                }
8332                if (crop) {
8333                    delete [] crop;
8334                }
8335             }
8336         }
8337     }
8338 
8339     if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8340         // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
8341         // so hardcoding the CAC result to OFF mode.
8342         uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8343         camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
8344     } else {
8345         IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
8346             int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8347                     *cacMode);
8348             if (NAME_NOT_FOUND != val) {
8349                 uint8_t resultCacMode = (uint8_t)val;
8350                 // check whether CAC result from CB is equal to Framework set CAC mode
8351                 // If not equal then set the CAC mode came in corresponding request
8352                 if (pendingRequest.fwkCacMode != resultCacMode) {
8353                     resultCacMode = pendingRequest.fwkCacMode;
8354                 }
8355                 //Check if CAC is disabled by property
8356                 if (m_cacModeDisabled) {
8357                     resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8358                 }
8359 
8360                 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
8361                 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
8362             } else {
8363                 LOGE("Invalid CAC camera parameter: %d", *cacMode);
8364             }
8365         }
8366     }
8367 
8368     // Post blob of cam_cds_data through vendor tag.
8369     IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8370         uint8_t cnt = cdsInfo->num_of_streams;
8371         cam_cds_data_t cdsDataOverride;
8372         memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8373         cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8374         cdsDataOverride.num_of_streams = 1;
8375         if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8376             uint32_t reproc_stream_id;
8377             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8378                 LOGD("No reprocessible stream found, ignore cds data");
8379             } else {
8380                 for (size_t i = 0; i < cnt; i++) {
8381                     if (cdsInfo->cds_info[i].stream_id ==
8382                             reproc_stream_id) {
8383                         cdsDataOverride.cds_info[0].cds_enable =
8384                                 cdsInfo->cds_info[i].cds_enable;
8385                         break;
8386                     }
8387                 }
8388             }
8389         } else {
8390             LOGD("Invalid stream count %d in CDS_DATA", cnt);
8391         }
8392         camMetadata.update(QCAMERA3_CDS_INFO,
8393                 (uint8_t *)&cdsDataOverride,
8394                 sizeof(cam_cds_data_t));
8395     }
8396 
8397     // Ldaf calibration data
8398     if (!mLdafCalibExist) {
8399         IF_META_AVAILABLE(uint32_t, ldafCalib,
8400                 CAM_INTF_META_LDAF_EXIF, metadata) {
8401             mLdafCalibExist = true;
8402             mLdafCalib[0] = ldafCalib[0];
8403             mLdafCalib[1] = ldafCalib[1];
8404             LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8405                     ldafCalib[0], ldafCalib[1]);
8406         }
8407     }
8408 
8409     // EXIF debug data through vendor tag
8410     /*
8411      * Mobicat Mask can assume 3 values:
8412      * 1 refers to Mobicat data,
8413      * 2 refers to Stats Debug and Exif Debug Data
8414      * 3 refers to Mobicat and Stats Debug Data
8415      * We want to make sure that we are sending Exif debug data
8416      * only when Mobicat Mask is 2.
8417      */
8418     if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8419         camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8420                 (uint8_t *)(void *)mExifParams.debug_params,
8421                 sizeof(mm_jpeg_debug_exif_params_t));
8422     }
8423 
8424     // Reprocess and DDM debug data through vendor tag
8425     cam_reprocess_info_t repro_info;
8426     memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
8427     IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8428             CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
8429         memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
8430     }
8431     IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8432             CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
8433         memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
8434     }
8435     IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8436             CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
8437         memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
8438     }
8439     IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8440             CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
8441         memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
8442     }
8443     IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8444             CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
8445         memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
8446     }
8447     IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
8448         memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
8449     }
8450     IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8451             CAM_INTF_PARM_ROTATION, metadata) {
8452         memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
8453     }
8454     IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8455         memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8456     }
8457     IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8458         memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8459     }
8460     camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8461         (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
8462 
8463     // INSTANT AEC MODE
8464     IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8465             CAM_INTF_PARM_INSTANT_AEC, metadata) {
8466         camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8467     }
8468 
8469     // AF scene change
8470     IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8471         camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8472         camMetadata.update(ANDROID_CONTROL_AF_SCENE_CHANGE, afSceneChange, 1);
8473     } else {
8474         uint8_t noSceneChange = 0;
8475         camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, &noSceneChange, 1);
8476         camMetadata.update(ANDROID_CONTROL_AF_SCENE_CHANGE, &noSceneChange, 1);
8477         LOGE("Missing AF_SCENE_CHANGE metadata!");
8478     }
8479 
8480     // Enable ZSL
8481     if (enableZsl != nullptr) {
8482         uint8_t value = *enableZsl ?
8483                 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8484         camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8485     }
8486 
8487     camMetadata.update(ANDROID_STATISTICS_OIS_DATA_MODE, &pendingRequest.requestedOisDataMode, 1);
8488 
8489     // OIS Data
8490     IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8491         camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8492             &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8493         camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8494             frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8495         camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8496             frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8497         camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8498             frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
8499 
8500         if (pendingRequest.requestedOisDataMode == ANDROID_STATISTICS_OIS_DATA_MODE_ON) {
8501             int64_t timeDiff = pendingRequest.timestamp -
8502                     frame_ois_data->frame_sof_timestamp_boottime;
8503 
8504             std::vector<int64_t> oisTimestamps;
8505 
8506             for (int32_t i = 0; i < frame_ois_data->num_ois_sample; i++) {
8507                 oisTimestamps.push_back(
8508                         frame_ois_data->ois_sample_timestamp_boottime[i] + timeDiff);
8509             }
8510 
8511             camMetadata.update(ANDROID_STATISTICS_OIS_TIMESTAMPS,
8512                     oisTimestamps.data(), frame_ois_data->num_ois_sample);
8513             camMetadata.update(ANDROID_STATISTICS_OIS_X_SHIFTS,
8514                     frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8515             camMetadata.update(ANDROID_STATISTICS_OIS_Y_SHIFTS,
8516                     frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
8517         } else {
8518             // If OIS data mode is OFF, add NULL for OIS keys.
8519             camMetadata.update(ANDROID_STATISTICS_OIS_TIMESTAMPS,
8520                     frame_ois_data->ois_sample_timestamp_boottime, 0);
8521             camMetadata.update(ANDROID_STATISTICS_OIS_X_SHIFTS,
8522                     frame_ois_data->ois_sample_shift_pixel_x, 0);
8523             camMetadata.update(ANDROID_STATISTICS_OIS_Y_SHIFTS,
8524                     frame_ois_data->ois_sample_shift_pixel_y, 0);
8525         }
8526     }
8527 
8528     // DevCamDebug metadata translateFromHalMetadata AEC MOTION
8529     IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
8530             CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
8531         float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
8532         camMetadata.update(NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_X,
8533                            &fwk_DevCamDebug_aec_camera_motion_dx, 1);
8534     }
8535     IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
8536             CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
8537         float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
8538         camMetadata.update(NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_Y,
8539                            &fwk_DevCamDebug_aec_camera_motion_dy, 1);
8540     }
8541     IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
8542             CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
8543         float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
8544         camMetadata.update(NEXUS_EXPERIMENTAL_2017_SUBJECT_MOTION,
8545                            &fwk_DevCamDebug_aec_subject_motion, 1);
8546     }
8547 
8548     // Camera lens calibration dynamic fields, for back camera. Same values as for static metadata.
8549     if (mCameraId == 0) {
8550         const camera_metadata_t *staticInfo = gStaticMetadata[mCameraId];
8551         camera_metadata_ro_entry_t rotation, translation, intrinsics, distortion, reference;
8552         int res;
8553         bool fail = false;
8554         res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_POSE_ROTATION,
8555                 &rotation);
8556         if (res != 0) {
8557             fail = true;
8558         }
8559         res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_POSE_TRANSLATION,
8560                 &translation);
8561         if (res != 0) {
8562             fail = true;
8563         }
8564         res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_INTRINSIC_CALIBRATION,
8565                 &intrinsics);
8566         if (res != 0) {
8567             fail = true;
8568         }
8569         res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_DISTORTION,
8570                 &distortion);
8571         if (res != 0) {
8572             fail = true;
8573         }
8574         res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_POSE_REFERENCE,
8575                 &reference);
8576         if (res != 0) {
8577             fail = true;
8578         }
8579 
8580         if (!fail) {
8581             camMetadata.update(ANDROID_LENS_POSE_ROTATION,
8582                     rotation.data.f, rotation.count);
8583             camMetadata.update(ANDROID_LENS_POSE_TRANSLATION,
8584                     translation.data.f, translation.count);
8585             camMetadata.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
8586                     intrinsics.data.f, intrinsics.count);
8587             camMetadata.update(ANDROID_LENS_DISTORTION,
8588                     distortion.data.f, distortion.count);
8589             camMetadata.update(ANDROID_LENS_POSE_REFERENCE,
8590                     reference.data.u8, reference.count);
8591         }
8592     }
8593 
8594     resultMetadata = camMetadata.release();
8595     return resultMetadata;
8596 }
8597 
8598 /*===========================================================================
8599  * FUNCTION   : saveExifParams
8600  *
 * DESCRIPTION: Caches the per-frame 3A/stats EXIF debug parameters from the
 *              metadata buffer into mExifParams for use during JPEG encoding.
8602  *
8603  * PARAMETERS :
8604  *   @metadata : metadata information from callback
8605  *
8606  * RETURN     : none
8607  *
8608  *==========================================================================*/
void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
{
    // Cache each 3A/stats EXIF debug payload present in this metadata buffer
    // into mExifParams so it can be embedded in the next JPEG. Every section
    // is copied only when mExifParams.debug_params has been allocated, and
    // the matching *_valid flag marks the cached copy as fresh.

    // Auto-exposure debug info.
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
            mExifParams.debug_params->ae_debug_params_valid = TRUE;
        }
    }
    // Auto-white-balance debug info.
    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
            mExifParams.debug_params->awb_debug_params_valid = TRUE;
        }
    }
    // Autofocus debug info.
    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
            mExifParams.debug_params->af_debug_params_valid = TRUE;
        }
    }
    // Auto-scene-detection debug info.
    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
            mExifParams.debug_params->asd_debug_params_valid = TRUE;
        }
    }
    // General stats-buffer debug info.
    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
            mExifParams.debug_params->stats_debug_params_valid = TRUE;
        }
    }
    // Bayer-exposure stats debug info.
    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
            mExifParams.debug_params->bestats_debug_params_valid = TRUE;
        }
    }
    // Bayer-histogram debug info.
    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
            mExifParams.debug_params->bhist_debug_params_valid = TRUE;
        }
    }
    // 3A tuning debug info.
    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
            mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
        }
    }
}
8668 
8669 /*===========================================================================
8670  * FUNCTION   : get3AExifParams
8671  *
 * DESCRIPTION: Returns (by value) the EXIF/3A parameters most recently
 *              cached by saveExifParams(), for use by the JPEG encoder.
8673  *
8674  * PARAMETERS : none
8675  *
8676  *
8677  * RETURN     : mm_jpeg_exif_params_t
8678  *
8679  *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Return a copy of the cached EXIF/3A parameters most recently filled
    // in by saveExifParams(); consumed by the JPEG encoding path.
    return mExifParams;
}
8684 
8685 /*===========================================================================
8686  * FUNCTION   : translateCbUrgentMetadataToResultMetadata
8687  *
 * DESCRIPTION: Translates the urgent (partial-result) subset of HAL metadata
 *              — 3A states, triggers, AF regions, AE mode — into framework
 *              result metadata.
8689  *
8690  * PARAMETERS :
8691  *   @metadata : metadata information from callback
8692  *   @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8693  *                               urgent metadata in a batch. Always true for
8694  *                               non-batch mode.
8695  *   @frame_number :             frame number for this urgent metadata
8696  *   @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8697  *                         i.e. even though it doesn't map to a valid partial
8698  *                         frame number, its metadata entries should be kept.
8699  * RETURN     : camera_metadata_t*
8700  *              metadata in a format specified by fwk
8701  *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
                                 uint32_t frame_number, bool isJumpstartMetadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;

    if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
        /* In batch mode, use empty metadata if this is not the last in batch
         */
        resultMetadata = allocate_camera_metadata(0, 0);
        return resultMetadata;
    }

    // AWB state (searching/converged/locked), narrowed to the framework's
    // uint8 enum representation.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // Echo back the AE precapture trigger and its ID so the framework can
    // correlate precapture sequences with their originating requests.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // Map the HAL focus mode to the framework AF-mode enum via the
    // static translation table.
    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
                    val);
        }
    }

    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
            af_trigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
            af_trigger->trigger_id);

        IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
            // Remember the latest trigger; it is re-emitted below even for
            // frames where no new trigger metadata arrives.
            mAfTrigger = *af_trigger;
            uint32_t fwk_AfState = (uint32_t) *afState;

            // If this is the result for a new trigger, check if there is new early
            // af state. If there is, use the last af state for all results
            // preceding current partial frame number.
            for (auto & pendingRequest : mPendingRequestsList) {
                if (pendingRequest.frame_number < frame_number) {
                    pendingRequest.focusStateValid = true;
                    pendingRequest.focusState = fwk_AfState;
                } else if (pendingRequest.frame_number == frame_number) {
                    IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
                        // Check if early AF state for trigger exists. If yes, send AF state as
                        // partial result for better latency.
                        uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
                        pendingRequest.focusStateSent = true;
                        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
                        LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
                                 frame_number, fwkEarlyAfState);
                    }
                }
            }
        }
    }
    // Always report the cached AF trigger/ID (persisted in mAfTrigger above)
    // so the framework sees a consistent trigger value on every result.
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
        &mAfTrigger.trigger, 1);
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);

    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
        /*af regions*/
        cam_rect_t hAfRect = hAfRegions->rect;
        int32_t afRegions[REGIONS_TUPLE_COUNT];
        // Adjust crop region from sensor output coordinate system to active
        // array coordinate system.
        mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
                hAfRect.width, hAfRect.height);

        convertToRegions(hAfRect, afRegions, hAfRegions->weight);
        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
                REGIONS_TUPLE_COUNT);
        LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
                 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
                hAfRect.left, hAfRect.top, hAfRect.width,
                hAfRect.height);
    }

    // AF region confidence
    IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
        camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
    }

    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // Deduce the framework AE mode from three independent HAL fields:
    // red-eye reduction takes priority, then flash mode, then raw AE mode.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }
    if (mInstantAEC) {
        // Increment frame Idx count until a bound reached for instant AEC.
        mInstantAecFrameIdxCount++;
        IF_META_AVAILABLE(cam_3a_params_t, ae_params,
                CAM_INTF_META_AEC_INFO, metadata) {
            LOGH("ae_params->settled = %d",ae_params->settled);
            // If AEC settled, or if number of frames reached bound value,
            // should reset instant AEC.
            if (ae_params->settled ||
                    (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
                LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
                mInstantAEC = false;
                mResetInstantAEC = true;
                mInstantAecFrameIdxCount = 0;
            }
        }
    }

    // Time-of-flight laser AF: trust the reported distance only when the
    // sensor reports confidence == 1; otherwise mark distance unknown (-1).
    IF_META_AVAILABLE(int32_t, af_tof_confidence,
            CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
        IF_META_AVAILABLE(int32_t, af_tof_distance,
                CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
            int32_t fwk_af_tof_confidence = *af_tof_confidence;
            int32_t fwk_af_tof_distance = *af_tof_distance;
            if (fwk_af_tof_confidence == 1) {
                mSceneDistance = fwk_af_tof_distance;
            } else {
                mSceneDistance = -1;
            }
            LOGD("tof_distance %d, tof_confidence %d, mSceneDistance %d",
                     fwk_af_tof_distance, fwk_af_tof_confidence, mSceneDistance);
        }
    }
    camMetadata.update(NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE, &mSceneDistance, 1);

    resultMetadata = camMetadata.release();
    return resultMetadata;
}
8900 
8901 /*===========================================================================
8902  * FUNCTION   : dumpMetadataToFile
8903  *
8904  * DESCRIPTION: Dumps tuning metadata to file system
8905  *
8906  * PARAMETERS :
8907  *   @meta           : tuning metadata
8908  *   @dumpFrameCount : current dump frame count
8909  *   @enabled        : Enable mask
8910  *
8911  *==========================================================================*/
dumpMetadataToFile(tuning_params_t & meta,uint32_t & dumpFrameCount,bool enabled,const char * type,uint32_t frameNumber)8912 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8913                                                    uint32_t &dumpFrameCount,
8914                                                    bool enabled,
8915                                                    const char *type,
8916                                                    uint32_t frameNumber)
8917 {
8918     //Some sanity checks
8919     if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8920         LOGE("Tuning sensor data size bigger than expected %d: %d",
8921               meta.tuning_sensor_data_size,
8922               TUNING_SENSOR_DATA_MAX);
8923         return;
8924     }
8925 
8926     if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8927         LOGE("Tuning VFE data size bigger than expected %d: %d",
8928               meta.tuning_vfe_data_size,
8929               TUNING_VFE_DATA_MAX);
8930         return;
8931     }
8932 
8933     if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8934         LOGE("Tuning CPP data size bigger than expected %d: %d",
8935               meta.tuning_cpp_data_size,
8936               TUNING_CPP_DATA_MAX);
8937         return;
8938     }
8939 
8940     if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8941         LOGE("Tuning CAC data size bigger than expected %d: %d",
8942               meta.tuning_cac_data_size,
8943               TUNING_CAC_DATA_MAX);
8944         return;
8945     }
8946     //
8947 
8948     if(enabled){
8949         char timeBuf[FILENAME_MAX];
8950         char buf[FILENAME_MAX];
8951         memset(buf, 0, sizeof(buf));
8952         memset(timeBuf, 0, sizeof(timeBuf));
8953         time_t current_time;
8954         struct tm * timeinfo;
8955         time (&current_time);
8956         timeinfo = localtime (&current_time);
8957         if (timeinfo != NULL) {
8958             strftime (timeBuf, sizeof(timeBuf),
8959                     QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8960         }
8961         String8 filePath(timeBuf);
8962         snprintf(buf,
8963                 sizeof(buf),
8964                 "%dm_%s_%d.bin",
8965                 dumpFrameCount,
8966                 type,
8967                 frameNumber);
8968         filePath.append(buf);
8969         int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8970         if (file_fd >= 0) {
8971             ssize_t written_len = 0;
8972             meta.tuning_data_version = TUNING_DATA_VERSION;
8973             void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8974             written_len += write(file_fd, data, sizeof(uint32_t));
8975             data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8976             LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8977             written_len += write(file_fd, data, sizeof(uint32_t));
8978             data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8979             LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8980             written_len += write(file_fd, data, sizeof(uint32_t));
8981             data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8982             LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8983             written_len += write(file_fd, data, sizeof(uint32_t));
8984             data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8985             LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8986             written_len += write(file_fd, data, sizeof(uint32_t));
8987             meta.tuning_mod3_data_size = 0;
8988             data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8989             LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8990             written_len += write(file_fd, data, sizeof(uint32_t));
8991             size_t total_size = meta.tuning_sensor_data_size;
8992             data = (void *)((uint8_t *)&meta.data);
8993             written_len += write(file_fd, data, total_size);
8994             total_size = meta.tuning_vfe_data_size;
8995             data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8996             written_len += write(file_fd, data, total_size);
8997             total_size = meta.tuning_cpp_data_size;
8998             data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8999             written_len += write(file_fd, data, total_size);
9000             total_size = meta.tuning_cac_data_size;
9001             data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
9002             written_len += write(file_fd, data, total_size);
9003             close(file_fd);
9004         }else {
9005             LOGE("fail to open file for metadata dumping");
9006         }
9007     }
9008 }
9009 
9010 /*===========================================================================
9011  * FUNCTION   : cleanAndSortStreamInfo
9012  *
9013  * DESCRIPTION: helper method to clean up invalid streams in stream_info,
9014  *              and sort them such that raw stream is at the end of the list
9015  *              This is a workaround for camera daemon constraint.
9016  *
9017  * PARAMETERS : None
9018  *
9019  *==========================================================================*/
void QCamera3HardwareInterface::cleanAndSortStreamInfo()
{
    List<stream_info_t *> newStreamInfo;

    /*clean up invalid streams*/
    // Streams flagged INVALID are torn down here: the owning channel is
    // destroyed and the stream_info entry freed before being unlinked.
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if(((*it)->status) == INVALID){
            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
            delete channel;
            free(*it);
            it = mStreamInfo.erase(it);
        } else {
            it++;
        }
    }

    // Move preview/video/callback/snapshot streams into newList
    // (every non-RAW stream, keeping its original relative order).
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
            newStreamInfo.push_back(*it);
            it = mStreamInfo.erase(it);
        } else
            it++;
    }
    // Move raw streams into newList
    // (appended last — the camera daemon requires RAW at the end of the list).
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end();) {
        newStreamInfo.push_back(*it);
        it = mStreamInfo.erase(it);
    }

    mStreamInfo = newStreamInfo;

    // Make sure that stream IDs are unique.
    uint32_t id = 0;
    for (auto streamInfo : mStreamInfo) {
        streamInfo->id = id++;
    }

}
9064 
9065 /*===========================================================================
9066  * FUNCTION   : extractJpegMetadata
9067  *
9068  * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
9069  *              JPEG metadata is cached in HAL, and return as part of capture
9070  *              result when metadata is returned from camera daemon.
9071  *
9072  * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
9073  *              @request:      capture request
9074  *
9075  *==========================================================================*/
extractJpegMetadata(CameraMetadata & jpegMetadata,const camera3_capture_request_t * request)9076 void QCamera3HardwareInterface::extractJpegMetadata(
9077         CameraMetadata& jpegMetadata,
9078         const camera3_capture_request_t *request)
9079 {
9080     CameraMetadata frame_settings;
9081     frame_settings = request->settings;
9082 
9083     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
9084         jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
9085                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
9086                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
9087 
9088     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
9089         jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
9090                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
9091                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
9092 
9093     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
9094         jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
9095                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
9096                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
9097 
9098     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
9099         jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
9100                 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
9101                 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
9102 
9103     if (frame_settings.exists(ANDROID_JPEG_QUALITY))
9104         jpegMetadata.update(ANDROID_JPEG_QUALITY,
9105                 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
9106                 frame_settings.find(ANDROID_JPEG_QUALITY).count);
9107 
9108     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
9109         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
9110                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
9111                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
9112 
9113     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9114         int32_t thumbnail_size[2];
9115         thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9116         thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9117         if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
9118             int32_t orientation =
9119                   frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
9120             if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
9121                //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
9122                int32_t temp;
9123                temp = thumbnail_size[0];
9124                thumbnail_size[0] = thumbnail_size[1];
9125                thumbnail_size[1] = temp;
9126             }
9127          }
9128          jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
9129                 thumbnail_size,
9130                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
9131     }
9132 
9133 }
9134 
9135 /*===========================================================================
9136  * FUNCTION   : convertToRegions
9137  *
9138  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
9139  *
9140  * PARAMETERS :
9141  *   @rect   : cam_rect_t struct to convert
9142  *   @region : int32_t destination array
9143  *   @weight : if we are converting from cam_area_t, weight is valid
9144  *             else weight = -1
9145  *
9146  *==========================================================================*/
convertToRegions(cam_rect_t rect,int32_t * region,int weight)9147 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
9148         int32_t *region, int weight)
9149 {
9150     region[FACE_LEFT] = rect.left;
9151     region[FACE_TOP] = rect.top;
9152     region[FACE_RIGHT] = rect.left + rect.width;
9153     region[FACE_BOTTOM] = rect.top + rect.height;
9154     if (weight > -1) {
9155         region[FACE_WEIGHT] = weight;
9156     }
9157 }
9158 
9159 /*===========================================================================
9160  * FUNCTION   : convertFromRegions
9161  *
9162  * DESCRIPTION: helper method to convert from array to cam_rect_t
9163  *
9164  * PARAMETERS :
9165  *   @rect   : cam_rect_t struct to convert
9166  *   @region : int32_t destination array
9167  *   @weight : if we are converting from cam_area_t, weight is valid
9168  *             else weight = -1
9169  *
9170  *==========================================================================*/
convertFromRegions(cam_area_t & roi,const CameraMetadata & frame_settings,uint32_t tag)9171 void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
9172         const CameraMetadata &frame_settings, uint32_t tag)
9173 {
9174     int32_t x_min = frame_settings.find(tag).data.i32[0];
9175     int32_t y_min = frame_settings.find(tag).data.i32[1];
9176     int32_t x_max = frame_settings.find(tag).data.i32[2];
9177     int32_t y_max = frame_settings.find(tag).data.i32[3];
9178     roi.weight = frame_settings.find(tag).data.i32[4];
9179     roi.rect.left = x_min;
9180     roi.rect.top = y_min;
9181     roi.rect.width = x_max - x_min;
9182     roi.rect.height = y_max - y_min;
9183 }
9184 
9185 /*===========================================================================
9186  * FUNCTION   : resetIfNeededROI
9187  *
9188  * DESCRIPTION: helper method to reset the roi if it is greater than scaler
9189  *              crop region
9190  *
9191  * PARAMETERS :
9192  *   @roi       : cam_area_t struct to resize
9193  *   @scalerCropRegion : cam_crop_region_t region to compare against
9194  *
9195  *
9196  *==========================================================================*/
resetIfNeededROI(cam_area_t * roi,const cam_crop_region_t * scalerCropRegion)9197 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
9198                                                  const cam_crop_region_t* scalerCropRegion)
9199 {
9200     int32_t roi_x_max = roi->rect.width + roi->rect.left;
9201     int32_t roi_y_max = roi->rect.height + roi->rect.top;
9202     int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
9203     int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
9204 
9205     /* According to spec weight = 0 is used to indicate roi needs to be disabled
9206      * without having this check the calculations below to validate if the roi
9207      * is inside scalar crop region will fail resulting in the roi not being
9208      * reset causing algorithm to continue to use stale roi window
9209      */
9210     if (roi->weight == 0) {
9211         return true;
9212     }
9213 
9214     if ((roi_x_max < scalerCropRegion->left) ||
9215         // right edge of roi window is left of scalar crop's left edge
9216         (roi_y_max < scalerCropRegion->top)  ||
9217         // bottom edge of roi window is above scalar crop's top edge
9218         (roi->rect.left > crop_x_max) ||
9219         // left edge of roi window is beyond(right) of scalar crop's right edge
9220         (roi->rect.top > crop_y_max)){
9221         // top edge of roi windo is above scalar crop's top edge
9222         return false;
9223     }
9224     if (roi->rect.left < scalerCropRegion->left) {
9225         roi->rect.left = scalerCropRegion->left;
9226     }
9227     if (roi->rect.top < scalerCropRegion->top) {
9228         roi->rect.top = scalerCropRegion->top;
9229     }
9230     if (roi_x_max > crop_x_max) {
9231         roi_x_max = crop_x_max;
9232     }
9233     if (roi_y_max > crop_y_max) {
9234         roi_y_max = crop_y_max;
9235     }
9236     roi->rect.width = roi_x_max - roi->rect.left;
9237     roi->rect.height = roi_y_max - roi->rect.top;
9238     return true;
9239 }
9240 
9241 /*===========================================================================
9242  * FUNCTION   : convertLandmarks
9243  *
9244  * DESCRIPTION: helper method to extract the landmarks from face detection info
9245  *
9246  * PARAMETERS :
9247  *   @landmark_data : input landmark data to be converted
9248  *   @landmarks : int32_t destination array
9249  *
9250  *
9251  *==========================================================================*/
convertLandmarks(cam_face_landmarks_info_t landmark_data,int32_t * landmarks)9252 void QCamera3HardwareInterface::convertLandmarks(
9253         cam_face_landmarks_info_t landmark_data,
9254         int32_t *landmarks)
9255 {
9256     if (landmark_data.is_left_eye_valid) {
9257         landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
9258         landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
9259     } else {
9260         landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
9261         landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
9262     }
9263 
9264     if (landmark_data.is_right_eye_valid) {
9265         landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
9266         landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
9267     } else {
9268         landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
9269         landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
9270     }
9271 
9272     if (landmark_data.is_mouth_valid) {
9273         landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
9274         landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
9275     } else {
9276         landmarks[MOUTH_X] = FACE_INVALID_POINT;
9277         landmarks[MOUTH_Y] = FACE_INVALID_POINT;
9278     }
9279 }
9280 
9281 /*===========================================================================
9282  * FUNCTION   : setInvalidLandmarks
9283  *
9284  * DESCRIPTION: helper method to set invalid landmarks
9285  *
9286  * PARAMETERS :
9287  *   @landmarks : int32_t destination array
9288  *
9289  *
9290  *==========================================================================*/
setInvalidLandmarks(int32_t * landmarks)9291 void QCamera3HardwareInterface::setInvalidLandmarks(
9292         int32_t *landmarks)
9293 {
9294     landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
9295     landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
9296     landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
9297     landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
9298     landmarks[MOUTH_X] = FACE_INVALID_POINT;
9299     landmarks[MOUTH_Y] = FACE_INVALID_POINT;
9300 }
9301 
9302 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
9303 
9304 /*===========================================================================
9305  * FUNCTION   : getCapabilities
9306  *
9307  * DESCRIPTION: query camera capability from back-end
9308  *
9309  * PARAMETERS :
9310  *   @ops  : mm-interface ops structure
9311  *   @cam_handle  : camera handle for which we need capability
9312  *
9313  * RETURN     : ptr type of capability structure
9314  *              capability for success
9315  *              NULL for failure
9316  *==========================================================================*/
cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
        uint32_t cam_handle)
{
    int rc = NO_ERROR;
    QCamera3HeapMemory *capabilityHeap = NULL;
    cam_capability_t *cap_ptr = NULL;

    if (ops == NULL) {
        LOGE("Invalid arguments");
        return NULL;
    }

    // Scratch heap used only for the duration of the query; the result is
    // copied into a plain malloc'ed struct before the heap is torn down.
    capabilityHeap = new QCamera3HeapMemory(1);
    if (capabilityHeap == NULL) {
        LOGE("creation of capabilityHeap failed");
        return NULL;
    }

    /* Allocate memory for capability buffer */
    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
    if(rc != OK) {
        LOGE("No memory for cappability");
        goto allocate_failed;
    }

    /* Map memory for capability buffer */
    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));

    // Share the buffer with the backend so query_capability() can fill it in.
    rc = ops->map_buf(cam_handle,
            CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
            sizeof(cam_capability_t), capabilityHeap->getPtr(0));
    if(rc < 0) {
        LOGE("failed to map capability buffer");
        rc = FAILED_TRANSACTION;
        goto map_failed;
    }

    /* Query Capability */
    rc = ops->query_capability(cam_handle);
    if(rc < 0) {
        LOGE("failed to query capability");
        rc = FAILED_TRANSACTION;
        goto query_failed;
    }

    // Copy out of the mapped heap into a caller-owned buffer; the caller is
    // responsible for free()ing the returned pointer.
    cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (cap_ptr == NULL) {
        LOGE("out of memory");
        rc = NO_MEMORY;
        goto query_failed;
    }

    memset(cap_ptr, 0, sizeof(cam_capability_t));
    memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));

    // Reset analysis padding offsets to a known state before handing the
    // capability struct to consumers.
    int index;
    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
        cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
    }

    // Unwind labels: each label releases everything acquired before the
    // corresponding failure point; the success path falls through all of
    // them (unmap + deallocate + delete) and returns cap_ptr.
query_failed:
    ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;

    if (rc != NO_ERROR) {
        return NULL;
    } else {
        return cap_ptr;
    }
}
9392 
9393 /*===========================================================================
9394  * FUNCTION   : initCapabilities
9395  *
9396  * DESCRIPTION: initialize camera capabilities in static data struct
9397  *
9398  * PARAMETERS :
9399  *   @cameraId  : camera Id
9400  *
9401  * RETURN     : int32_t type of status
9402  *              NO_ERROR  -- success
9403  *              none-zero failure code
9404  *==========================================================================*/
initCapabilities(uint32_t cameraId)9405 int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
9406 {
9407     int rc = 0;
9408     mm_camera_vtbl_t *cameraHandle = NULL;
9409     uint32_t handle = 0;
9410 
9411     rc = camera_open((uint8_t)cameraId, &cameraHandle);
9412     if (rc) {
9413         LOGE("camera_open failed. rc = %d", rc);
9414         goto open_failed;
9415     }
9416     if (!cameraHandle) {
9417         LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
9418         goto open_failed;
9419     }
9420 
9421     handle = get_main_camera_handle(cameraHandle->camera_handle);
9422     gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
9423     if (gCamCapability[cameraId] == NULL) {
9424         rc = FAILED_TRANSACTION;
9425         goto failed_op;
9426     }
9427 
9428     gCamCapability[cameraId]->camera_index = cameraId;
9429     if (is_dual_camera_by_idx(cameraId)) {
9430         handle = get_aux_camera_handle(cameraHandle->camera_handle);
9431         gCamCapability[cameraId]->aux_cam_cap =
9432                 getCapabilities(cameraHandle->ops, handle);
9433         if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
9434             rc = FAILED_TRANSACTION;
9435             free(gCamCapability[cameraId]);
9436             goto failed_op;
9437         }
9438 
9439         // Copy the main camera capability to main_cam_cap struct
9440         gCamCapability[cameraId]->main_cam_cap =
9441                         (cam_capability_t *)malloc(sizeof(cam_capability_t));
9442         if (gCamCapability[cameraId]->main_cam_cap == NULL) {
9443             LOGE("out of memory");
9444             rc = NO_MEMORY;
9445             goto failed_op;
9446         }
9447         memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
9448                 sizeof(cam_capability_t));
9449     }
9450 failed_op:
9451     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
9452     cameraHandle = NULL;
9453 open_failed:
9454     return rc;
9455 }
9456 
9457 /*==========================================================================
 * FUNCTION   : get3AVersion
9459  *
9460  * DESCRIPTION: get the Q3A S/W version
9461  *
9462  * PARAMETERS :
9463  *  @sw_version: Reference of Q3A structure which will hold version info upon
9464  *               return
9465  *
9466  * RETURN     : None
9467  *
9468  *==========================================================================*/
get3AVersion(cam_q3a_version_t & sw_version)9469 void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
9470 {
9471     if(gCamCapability[mCameraId])
9472         sw_version = gCamCapability[mCameraId]->q3a_version;
9473     else
9474         LOGE("Capability structure NULL!");
9475 }
9476 
9477 
9478 /*===========================================================================
9479  * FUNCTION   : initParameters
9480  *
9481  * DESCRIPTION: initialize camera parameters
9482  *
9483  * PARAMETERS :
9484  *
9485  * RETURN     : int32_t type of status
9486  *              NO_ERROR  -- success
9487  *              none-zero failure code
9488  *==========================================================================*/
initParameters()9489 int QCamera3HardwareInterface::initParameters()
9490 {
9491     int rc = 0;
9492 
9493     //Allocate Set Param Buffer
9494     mParamHeap = new QCamera3HeapMemory(1);
9495     rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9496     if(rc != OK) {
9497         rc = NO_MEMORY;
9498         LOGE("Failed to allocate SETPARM Heap memory");
9499         delete mParamHeap;
9500         mParamHeap = NULL;
9501         return rc;
9502     }
9503 
9504     //Map memory for parameters buffer
9505     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9506             CAM_MAPPING_BUF_TYPE_PARM_BUF,
9507             mParamHeap->getFd(0),
9508             sizeof(metadata_buffer_t),
9509             (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9510     if(rc < 0) {
9511         LOGE("failed to map SETPARM buffer");
9512         rc = FAILED_TRANSACTION;
9513         mParamHeap->deallocate();
9514         delete mParamHeap;
9515         mParamHeap = NULL;
9516         return rc;
9517     }
9518 
9519     mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9520 
9521     mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9522     return rc;
9523 }
9524 
9525 /*===========================================================================
9526  * FUNCTION   : deinitParameters
9527  *
9528  * DESCRIPTION: de-initialize camera parameters
9529  *
9530  * PARAMETERS :
9531  *
9532  * RETURN     : NONE
9533  *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Tear down everything set up by initParameters(), in reverse order:
    // first unmap the shared parameter buffer from the backend, then
    // release the backing heap, then the locally malloc'ed previous copy.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's buffer and is now invalid.
    mParameters = NULL;

    free(mPrevParameters);
    mPrevParameters = NULL;
}
9548 
9549 /*===========================================================================
9550  * FUNCTION   : calcMaxJpegSize
9551  *
9552  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9553  *
9554  * PARAMETERS :
9555  *
9556  * RETURN     : max_jpeg_size
9557  *==========================================================================*/
calcMaxJpegSize(uint32_t camera_id)9558 size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9559 {
9560     size_t max_jpeg_size = 0;
9561     size_t temp_width, temp_height;
9562     size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9563             MAX_SIZES_CNT);
9564     for (size_t i = 0; i < count; i++) {
9565         temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9566         temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9567         if (temp_width * temp_height > max_jpeg_size ) {
9568             max_jpeg_size = temp_width * temp_height;
9569         }
9570     }
9571     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9572     return max_jpeg_size;
9573 }
9574 
9575 /*===========================================================================
9576  * FUNCTION   : getMaxRawSize
9577  *
9578  * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9579  *
9580  * PARAMETERS :
9581  *
9582  * RETURN     : Largest supported Raw Dimension
9583  *==========================================================================*/
getMaxRawSize(uint32_t camera_id)9584 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9585 {
9586     int max_width = 0;
9587     cam_dimension_t maxRawSize;
9588 
9589     memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9590     for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9591         if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9592             max_width = gCamCapability[camera_id]->raw_dim[i].width;
9593             maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9594         }
9595     }
9596     return maxRawSize;
9597 }
9598 
9599 
9600 /*===========================================================================
9601  * FUNCTION   : calcMaxJpegDim
9602  *
9603  * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9604  *
9605  * PARAMETERS :
9606  *
9607  * RETURN     : max_jpeg_dim
9608  *==========================================================================*/
calcMaxJpegDim()9609 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9610 {
9611     cam_dimension_t max_jpeg_dim;
9612     cam_dimension_t curr_jpeg_dim;
9613     max_jpeg_dim.width = 0;
9614     max_jpeg_dim.height = 0;
9615     curr_jpeg_dim.width = 0;
9616     curr_jpeg_dim.height = 0;
9617     for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9618         curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9619         curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9620         if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9621             max_jpeg_dim.width * max_jpeg_dim.height ) {
9622             max_jpeg_dim.width = curr_jpeg_dim.width;
9623             max_jpeg_dim.height = curr_jpeg_dim.height;
9624         }
9625     }
9626     return max_jpeg_dim;
9627 }
9628 
9629 /*===========================================================================
9630  * FUNCTION   : addStreamConfig
9631  *
9632  * DESCRIPTION: adds the stream configuration to the array
9633  *
9634  * PARAMETERS :
9635  * @available_stream_configs : pointer to stream configuration array
9636  * @scalar_format            : scalar format
9637  * @dim                      : configuration dimension
9638  * @config_type              : input or output configuration type
9639  *
9640  * RETURN     : NONE
9641  *==========================================================================*/
addStreamConfig(Vector<int32_t> & available_stream_configs,int32_t scalar_format,const cam_dimension_t & dim,int32_t config_type)9642 void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9643         int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9644 {
9645     available_stream_configs.add(scalar_format);
9646     available_stream_configs.add(dim.width);
9647     available_stream_configs.add(dim.height);
9648     available_stream_configs.add(config_type);
9649 }
9650 
9651 /*===========================================================================
 * FUNCTION   : supportBurstCapture
9653  *
9654  * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9655  *
9656  * PARAMETERS :
9657  *   @cameraId  : camera Id
9658  *
9659  * RETURN     : true if camera supports BURST_CAPTURE
9660  *              false otherwise
9661  *==========================================================================*/
supportBurstCapture(uint32_t cameraId)9662 bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9663 {
9664     const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9665     const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9666     const int32_t highResWidth = 3264;
9667     const int32_t highResHeight = 2448;
9668 
9669     if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9670         // Maximum resolution images cannot be captured at >= 10fps
9671         // -> not supporting BURST_CAPTURE
9672         return false;
9673     }
9674 
9675     if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9676         // Maximum resolution images can be captured at >= 20fps
9677         // --> supporting BURST_CAPTURE
9678         return true;
9679     }
9680 
9681     // Find the smallest highRes resolution, or largest resolution if there is none
9682     size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9683             MAX_SIZES_CNT);
9684     size_t highRes = 0;
9685     while ((highRes + 1 < totalCnt) &&
9686             (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9687             gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9688             highResWidth * highResHeight)) {
9689         highRes++;
9690     }
9691     if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9692         return true;
9693     } else {
9694         return false;
9695     }
9696 }
9697 
9698 /*===========================================================================
9699  * FUNCTION   : getPDStatIndex
9700  *
9701  * DESCRIPTION: Return the meta raw phase detection statistics index if present
9702  *
9703  * PARAMETERS :
9704  *   @caps    : camera capabilities
9705  *
9706  * RETURN     : int32_t type
9707  *              non-negative - on success
9708  *              -1 - on failure
9709  *==========================================================================*/
getPDStatIndex(cam_capability_t * caps)9710 int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9711     if (nullptr == caps) {
9712         return -1;
9713     }
9714 
9715     uint32_t metaRawCount = caps->meta_raw_channel_count;
9716     int32_t ret = -1;
9717     for (size_t i = 0; i < metaRawCount; i++) {
9718         if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9719             ret = i;
9720             break;
9721         }
9722     }
9723 
9724     return ret;
9725 }
9726 
9727 /*===========================================================================
9728  * FUNCTION   : initStaticMetadata
9729  *
9730  * DESCRIPTION: initialize the static metadata
9731  *
9732  * PARAMETERS :
9733  *   @cameraId  : camera Id
9734  *
9735  * RETURN     : int32_t type of status
9736  *              0  -- success
9737  *              non-zero failure code
9738  *==========================================================================*/
initStaticMetadata(uint32_t cameraId)9739 int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9740 {
9741     int rc = 0;
9742     CameraMetadata staticInfo;
9743     size_t count = 0;
9744     bool limitedDevice = false;
9745     char prop[PROPERTY_VALUE_MAX];
9746     bool supportBurst = false;
9747     Vector<int32_t> available_characteristics_keys;
9748 
9749     supportBurst = supportBurstCapture(cameraId);
9750 
9751     /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9752      * guaranteed or if min fps of max resolution is less than 20 fps, its
9753      * advertised as limited device*/
9754     limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9755             (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9756             (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9757             !supportBurst;
9758 
9759     uint8_t supportedHwLvl = limitedDevice ?
9760             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
9761 #ifndef USE_HAL_3_3
9762             // LEVEL_3 - This device will support level 3.
9763             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9764 #else
9765             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
9766 #endif
9767 
9768     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9769             &supportedHwLvl, 1);
9770 
9771     bool facingBack = false;
9772     if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9773             (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9774         facingBack = true;
9775     }
9776     /*HAL 3 only*/
9777     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9778                     &gCamCapability[cameraId]->min_focus_distance, 1);
9779 
9780     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9781                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
9782 
9783     /*should be using focal lengths but sensor doesn't provide that info now*/
9784     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9785                       &gCamCapability[cameraId]->focal_length,
9786                       1);
9787 
9788     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9789             gCamCapability[cameraId]->apertures,
9790             MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9791 
9792     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9793             gCamCapability[cameraId]->filter_densities,
9794             MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9795 
9796 
9797     uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9798     size_t mode_count =
9799         MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9800     for (size_t i = 0; i < mode_count; i++) {
9801       available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9802     }
9803     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9804             available_opt_stab_modes, mode_count);
9805 
9806     int32_t lens_shading_map_size[] = {
9807             MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9808             MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9809     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9810                       lens_shading_map_size,
9811                       sizeof(lens_shading_map_size)/sizeof(int32_t));
9812 
9813     // Lens calibration for MOTION_TRACKING, back camera only
9814     if (cameraId == 0) {
9815 
9816         float poseRotation[4] = {1.0f, 0.f, 0.f, 0.f}; // quaternion rotation
9817         float poseTranslation[3] = {0.0f, 0.f, 0.f}; // xyz translation, meters
9818         uint8_t poseReference = ANDROID_LENS_POSE_REFERENCE_GYROSCOPE;
9819         // TODO: b/70565622 - these should have better identity values as a fallback
9820         float cameraIntrinsics[5] = {100.f, 100.f, 0.f, 1000, 1000}; // fx,fy,sx,cx,cy
9821         float radialDistortion[5] = {0.f, 0.f, 0.f, 0.f, 0.f}; // identity
9822 
9823         bool success = readSensorCalibration(
9824                 gCamCapability[cameraId]->active_array_size.width,
9825                 poseRotation, poseTranslation, cameraIntrinsics, radialDistortion);
9826         if (!success) {
9827             ALOGE("Using identity lens calibration values");
9828         }
9829         staticInfo.update(ANDROID_LENS_POSE_ROTATION,
9830                 poseRotation, sizeof(poseRotation)/sizeof(float));
9831         staticInfo.update(ANDROID_LENS_POSE_TRANSLATION,
9832                 poseTranslation, sizeof(poseTranslation)/sizeof(float));
9833         staticInfo.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
9834                 cameraIntrinsics, sizeof(cameraIntrinsics)/sizeof(float));
9835         staticInfo.update(ANDROID_LENS_DISTORTION,
9836                 radialDistortion, sizeof(radialDistortion)/sizeof(float));
9837         staticInfo.update(ANDROID_LENS_POSE_REFERENCE,
9838                 &poseReference, sizeof(poseReference));
9839     }
9840 
9841     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9842             gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9843 
9844     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9845             gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9846 
9847     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9848             &gCamCapability[cameraId]->max_frame_duration, 1);
9849 
9850     camera_metadata_rational baseGainFactor = {
9851             gCamCapability[cameraId]->base_gain_factor.numerator,
9852             gCamCapability[cameraId]->base_gain_factor.denominator};
9853     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9854                       &baseGainFactor, 1);
9855 
9856     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9857                      (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9858 
9859     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9860             gCamCapability[cameraId]->pixel_array_size.height};
9861     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9862                       pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9863 
9864     int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9865             gCamCapability[cameraId]->active_array_size.top,
9866             gCamCapability[cameraId]->active_array_size.width,
9867             gCamCapability[cameraId]->active_array_size.height};
9868     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9869             active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9870 
9871     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9872             &gCamCapability[cameraId]->white_level, 1);
9873 
9874     int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9875     adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9876             gCamCapability[cameraId]->color_arrangement);
9877     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
9878             adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
9879 
9880 #ifndef USE_HAL_3_3
9881     bool hasBlackRegions = false;
9882     if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9883         LOGW("black_region_count: %d is bounded to %d",
9884             gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9885         gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9886     }
9887     if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9888         int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9889         for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9890             opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9891         }
9892         staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9893                 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9894         hasBlackRegions = true;
9895     }
9896 #endif
9897     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9898             &gCamCapability[cameraId]->flash_charge_duration, 1);
9899 
9900     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9901             &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9902 
9903     uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9904             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9905             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
9906     staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9907             &timestampSource, 1);
9908 
9909     //update histogram vendor data
9910     staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
9911             &gCamCapability[cameraId]->histogram_size, 1);
9912 
9913     staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
9914             &gCamCapability[cameraId]->max_histogram_count, 1);
9915 
9916     //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9917     //so that app can request fewer number of bins than the maximum supported.
9918     std::vector<int32_t> histBins;
9919     int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9920     histBins.push_back(maxHistBins);
9921     while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9922            (maxHistBins & 0x1) == 0) {
9923         histBins.push_back(maxHistBins >> 1);
9924         maxHistBins >>= 1;
9925     }
9926     staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9927             histBins.data(), histBins.size());
9928     if (!histBins.empty()) {
9929         available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS);
9930     }
9931 
9932     int32_t sharpness_map_size[] = {
9933             gCamCapability[cameraId]->sharpness_map_size.width,
9934             gCamCapability[cameraId]->sharpness_map_size.height};
9935 
9936     staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9937             sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9938 
9939     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9940             &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9941 
9942     int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9943     if (0 <= indexPD) {
9944         // Advertise PD stats data as part of the Depth capabilities
9945         int32_t depthWidth =
9946                 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9947         int32_t depthHeight =
9948                 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9949         int32_t depthStride =
9950                 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
9951         int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9952         assert(0 < depthSamplesCount);
9953         staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9954                 &depthSamplesCount, 1);
9955 
9956         int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9957                 depthHeight,
9958                 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9959                 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9960                 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9961         staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9962                 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9963 
9964         int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9965                 depthHeight, 33333333,
9966                 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9967         staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9968                 depthMinDuration,
9969                 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9970 
9971         int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9972                 depthHeight, 0,
9973                 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9974         staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9975                 depthStallDuration,
9976                 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9977 
9978         uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9979         staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9980 
9981         //RAW16 depth format doesn't require inefficient memory copy, recommend
9982         //only this depth format. The format itself is not public so it won't be
9983         //possible to advertise in the RAW use case. Use snapshot for now.
9984         int32_t recommendedDepthConfigs[] = {depthWidth, depthHeight, HAL_PIXEL_FORMAT_RAW16,
9985             ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9986             1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_SNAPSHOT};
9987         staticInfo.update(ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS,
9988                 recommendedDepthConfigs,
9989                 sizeof(recommendedDepthConfigs) / sizeof(recommendedDepthConfigs[0]));
9990         available_characteristics_keys.add(
9991                 ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS);
9992 
9993         int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9994         staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9995                 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
9996         available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS);
9997 
9998         staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS,
9999                 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.right_gain_map),
10000                 sizeof(gCamCapability[cameraId]->pdaf_cal.right_gain_map));
10001         available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS);
10002 
10003         staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS,
10004                 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.left_gain_map),
10005                 sizeof(gCamCapability[cameraId]->pdaf_cal.left_gain_map));
10006         available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS);
10007 
10008         staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF,
10009                 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.conversion_coeff),
10010                 sizeof(gCamCapability[cameraId]->pdaf_cal.conversion_coeff));
10011         available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF);
10012     }
10013 
10014 
    // Publish EEPROM white-balance calibration: the number of reference
    // light sources, per-light R/G and B/G channel ratios, and the sensor
    // Gr/Gb ratio. Each vendor tag is also added to the list of available
    // characteristics keys so the framework can enumerate it.
    staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_NUM_LIGHTS,
        &(gCamCapability[cameraId]->wb_cal.num_lights), 1);
    available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_NUM_LIGHTS);

    // One ratio entry is published per calibrated light source.
    const int32_t num_lights = gCamCapability[cameraId]->wb_cal.num_lights;
    staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_R_OVER_G_RATIOS,
        gCamCapability[cameraId]->wb_cal.r_over_g, num_lights);
    available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_R_OVER_G_RATIOS);

    staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_B_OVER_G_RATIOS,
        gCamCapability[cameraId]->wb_cal.b_over_g, num_lights);
    available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_B_OVER_G_RATIOS);

    // Gr/Gb is a single scalar for the whole sensor, not per-light.
    staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_GR_OVER_GB_RATIO,
        &(gCamCapability[cameraId]->wb_cal.gr_over_gb), 1);
    available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_GR_OVER_GB_RATIO);
10031 
    // Full set of pixel formats this HAL advertises. This array also drives
    // the stream-configuration and frame-duration loops further below, so
    // the order/content here must stay in sync with those switch statements.
    int32_t scalar_formats[] = {
            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
            HAL_PIXEL_FORMAT_RAW10,
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
            HAL_PIXEL_FORMAT_Y8};
    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
            scalar_formats_count);
10043 
    // Flatten the capability size tables into (width, height) int32 pairs
    // via makeTable()/makeFPSTable(); each table is clamped to MAX_SIZES_CNT
    // entries and published with count * 2 elements (two ints per entry).
    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
            count, MAX_SIZES_CNT, available_processed_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
            available_processed_sizes, count * 2);

    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
    makeTable(gCamCapability[cameraId]->raw_dim,
            count, MAX_SIZES_CNT, available_raw_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
            available_raw_sizes, count * 2);

    // FPS ranges are (min, max) pairs, hence also count * 2 elements.
    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
            count, MAX_SIZES_CNT, available_fps_ranges);
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            available_fps_ranges, count * 2);

    // Exposure-compensation step is published as a rational taken straight
    // from the sensor capability (e.g. 1/6 EV).
    camera_metadata_rational exposureCompensationStep = {
            gCamCapability[cameraId]->exp_compensation_step.numerator,
            gCamCapability[cameraId]->exp_compensation_step.denominator};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                      &exposureCompensationStep, 1);
10070 
    // Video stabilization: OFF is always advertised. ON is added only when
    // (a) this is the back camera, (b) the persist.camera.eis.enable property
    // is set (defaults to "1"), and (c) the sensor capability lists an EIS
    // 2.0 or 3.0 IS type.
    Vector<uint8_t> availableVstabModes;
    availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
    char eis_prop[PROPERTY_VALUE_MAX];
    bool eisSupported = false;
    memset(eis_prop, 0, sizeof(eis_prop));
    property_get("persist.camera.eis.enable", eis_prop, "1");
    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
    count = IS_TYPE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
    for (size_t i = 0; i < count; i++) {
        if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
            (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
            eisSupported = true;
            break;
        }
    }
    if (facingBack && eis_prop_set && eisSupported) {
        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                      availableVstabModes.array(), availableVstabModes.size());
10092 
    /*HAL 1 and HAL 3 common*/
    // Max digital zoom = last entry of the zoom ratio table (in percent)
    // divided by the 100% base step, capped at MAX_PREFERRED_ZOOM_RATIO.
    uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
    uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
    uint32_t minZoomStep = 100; //as per HAL1/API1 spec
    // Cap the max zoom to the max preferred value
    // NOTE(review): maxZoomStep/minZoomStep is unsigned integer division, so
    // any fractional part of the zoom ratio is truncated before the float
    // MIN() — e.g. a 799/100 table entry yields 7.0, not 7.99. Confirm this
    // truncation is intended.
    float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            &maxZoom, 1);

    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);

    // Max metering regions: 1 AE, 0 AWB, 1 AF — AF regions are dropped when
    // only a single (i.e. fixed) focus mode is supported.
    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
        max3aRegions[2] = 0; /* AF not supported */
    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
            max3aRegions, 3);
10110 
    /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
    // Face-detect support level is overridable via the
    // persist.camera.facedetect property (default "1" => OFF+SIMPLE).
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.facedetect", prop, "1");
    uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
    LOGD("Support face detection mode: %d",
             supportedFaceDetectMode);

    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
    /* support mode should be OFF if max number of face is 0 */
    if (maxFaces <= 0) {
        supportedFaceDetectMode = 0;
    }
    // OFF is always available; SIMPLE/FULL are appended per the property
    // value above. Any unrecognized value (incl. 0) forces maxFaces to 0.
    Vector<uint8_t> availableFaceDetectModes;
    availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
    if (supportedFaceDetectMode == 1) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
    } else if (supportedFaceDetectMode == 2) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
    } else if (supportedFaceDetectMode == 3) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
    } else {
        maxFaces = 0;
    }
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
            availableFaceDetectModes.array(),
            availableFaceDetectModes.size());
    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
            (int32_t *)&maxFaces, 1);
    // Vendor tag: whether blink/smile/gaze classification is available.
    uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
    staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
            &face_bsgc, 1);
10143 
    // Exposure-compensation range [min, max] in steps of
    // ANDROID_CONTROL_AE_COMPENSATION_STEP published above.
    int32_t exposureCompensationRange[] = {
            gCamCapability[cameraId]->exposure_compensation_min,
            gCamCapability[cameraId]->exposure_compensation_max};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    uint8_t lensFacing = (facingBack) ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);

    // available_thumbnail_sizes is a (width, height) pair table declared
    // earlier in this function (outside this view).
    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                      available_thumbnail_sizes,
                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
10158 
    /*all sizes will be clubbed into this tag*/
    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
    /*android.scaler.availableStreamConfigurations*/
    // available_stream_configs: flat (format, w, h, in/out) tuples for the
    // framework tag. stream_configs mirrors it as config_entry records for
    // membership tests; suggested_configs maps config_entry -> use-case
    // bitmask for the recommended-configurations tag built further below.
    Vector<int32_t> available_stream_configs;
    std::vector<config_entry> stream_configs;
    std::unordered_map<config_entry, int32_t, ConfigEntryHash> suggested_configs;
    int32_t suggested_proc_formats[] = {
        ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
    size_t suggested_formats_count = sizeof(suggested_proc_formats) /
        sizeof(suggested_proc_formats[0]);
    cam_dimension_t active_array_dim;
    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;

    /*advertise list of input dimensions supported based on below property.
    By default all sizes upto 5MP will be advertised.
    Note that the setprop resolution format should be WxH.
    e.g: adb shell setprop persist.camera.input.minsize 1280x720
    To list all supported sizes, setprop needs to be set with "0x0" */
    cam_dimension_t minInputSize = {2592,1944}; //5MP
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.input.minsize", prop, "2592x1944");
    // Parse "WxH"; either token may be missing, in which case the 5MP
    // default above is kept for that dimension.
    // NOTE(review): atoi() gives 0 on malformed input — with "0x0" that is
    // what enables all input sizes, so non-numeric junk silently does the
    // same. Confirm this is acceptable.
    if (strlen(prop) > 0) {
        char *saveptr = NULL;
        char *token = strtok_r(prop, "x", &saveptr);
        if (token != NULL) {
            minInputSize.width = atoi(token);
        }
        token = strtok_r(NULL, "x", &saveptr);
        if (token != NULL) {
            minInputSize.height = atoi(token);
        }
    }
10193 
    // Bitmasks of recommended-stream use cases, OR-ed into suggested_configs
    // per stream configuration below.
    int32_t raw_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_RAW;
    int32_t zsl_snapshot_usecase =
            (1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_SNAPSHOT) |
            (1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_ZSL);
    int32_t zsl_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_ZSL;
    /* Add input/output stream configurations for each scalar formats*/
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        // RAW formats: one OUTPUT configuration per supported raw dimension.
        // RAW10/RAW_OPAQUE additionally get the RAW recommended use case.
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->raw_dim[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                config_entry entry(gCamCapability[cameraId]->raw_dim[i].width,
                        gCamCapability[cameraId]->raw_dim[i].height, scalar_formats[j],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                stream_configs.push_back(entry);
                if ((scalar_formats[j] == HAL_PIXEL_FORMAT_RAW10) ||
                        (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE)) {
                    // operator[] default-constructs the mask to 0 on first use.
                    suggested_configs[entry] |= raw_usecase;
                }
            }
            break;
        // JPEG: one OUTPUT configuration per picture size, tagged as both
        // SNAPSHOT and ZSL recommended use cases.
        case HAL_PIXEL_FORMAT_BLOB:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->picture_sizes_tbl[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                stream_configs.push_back(config_entry(
                            gCamCapability[cameraId]->picture_sizes_tbl[i].width,
                            gCamCapability[cameraId]->picture_sizes_tbl[i].height,
                            scalar_formats[j],
                            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT));
                config_entry entry(gCamCapability[cameraId]->picture_sizes_tbl[i].width,
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height, scalar_formats[j],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                suggested_configs[entry] |= zsl_snapshot_usecase;
            }
            break;
        // Processed YUV/implementation-defined/Y8: OUTPUT configuration per
        // picture size; additionally, the largest size (i == 0) is exported
        // as an INPUT configuration for reprocessing when it meets the
        // minimum-input-size threshold parsed above.
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        case HAL_PIXEL_FORMAT_Y8:
        default:
            cam_dimension_t largest_picture_size;
            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->picture_sizes_tbl[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                config_entry entry(gCamCapability[cameraId]->picture_sizes_tbl[i].width,
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height,
                        scalar_formats[j],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                stream_configs.push_back(entry);
                suggested_configs[entry] |= zsl_snapshot_usecase;
                /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
                if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                        scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
                        scalar_formats[j] == HAL_PIXEL_FORMAT_Y8) && i == 0) {
                     if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
                            >= minInputSize.width) || (gCamCapability[cameraId]->
                            picture_sizes_tbl[i].height >= minInputSize.height)) {
                         addStreamConfig(available_stream_configs, scalar_formats[j],
                                 gCamCapability[cameraId]->picture_sizes_tbl[i],
                                 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
                         config_entry entry(
                                 gCamCapability[cameraId]->picture_sizes_tbl[i].width,
                                 gCamCapability[cameraId]->picture_sizes_tbl[i].height,
                                 scalar_formats[j],
                                 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
                         suggested_configs[entry] |= zsl_usecase;
                     }
                }
            }

            break;
        }
    }

    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                      available_stream_configs.array(), available_stream_configs.size());
10282 
    // Tag PREVIEW use case onto every preview-size entry (for the two
    // processed formats) that was actually advertised as an OUTPUT stream
    // configuration above — the std::find membership check prevents
    // recommending configurations the HAL never exported.
    int32_t preview_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PREVIEW;
    for (size_t i = 0; i < gCamCapability[cameraId]->preview_sizes_tbl_cnt; i++) {
        for (size_t j = 0; j < suggested_formats_count; j++) {
            config_entry entry(gCamCapability[cameraId]->preview_sizes_tbl[i].width,
                    gCamCapability[cameraId]->preview_sizes_tbl[i].height,
                    suggested_proc_formats[j],
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            if (std::find(stream_configs.begin(), stream_configs.end(), entry) !=
                    stream_configs.end()) {
                suggested_configs[entry] |= preview_usecase;
            }
        }
    }

    // Same membership-gated tagging for RECORD on the video size table.
    int32_t record_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_RECORD;
    for (size_t i = 0; i < gCamCapability[cameraId]->video_sizes_tbl_cnt; i++) {
        for (size_t j = 0; j < suggested_formats_count; j++) {
            config_entry entry(gCamCapability[cameraId]->video_sizes_tbl[i].width,
                    gCamCapability[cameraId]->video_sizes_tbl[i].height,
                    suggested_proc_formats[j],
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            if (std::find(stream_configs.begin(), stream_configs.end(), entry) !=
                    stream_configs.end()) {
                suggested_configs[entry] |= record_usecase;
            }
        }
    }

    // VIDEO_SNAPSHOT applies to BLOB (JPEG) outputs at live-snapshot sizes.
    int32_t video_snapshot_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VIDEO_SNAPSHOT;
    for (size_t i = 0; i < gCamCapability[cameraId]->livesnapshot_sizes_tbl_cnt; i++) {
        config_entry entry(gCamCapability[cameraId]->livesnapshot_sizes_tbl[i].width,
                gCamCapability[cameraId]->livesnapshot_sizes_tbl[i].height,
                HAL_PIXEL_FORMAT_BLOB,
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
        if (std::find(stream_configs.begin(), stream_configs.end(), entry) !=
                stream_configs.end()) {
            suggested_configs[entry] |= video_snapshot_usecase;
        }
    }

    // Serialize the map into flat 5-tuples:
    // (width, height, format, input/output flag, use-case bitmask).
    // Iteration order of the unordered_map is unspecified; the tag does not
    // appear to require a particular ordering here.
    std::vector<int32_t> suggested_array;
    suggested_array.reserve(suggested_configs.size() * 5);
    for (const auto &it : suggested_configs) {
        suggested_array.push_back(std::get<0>(it.first));
        suggested_array.push_back(std::get<1>(it.first));
        suggested_array.push_back(std::get<2>(it.first));
        suggested_array.push_back(std::get<3>(it.first));
        suggested_array.push_back(it.second);
    }

    staticInfo.update(ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS,
            suggested_array.data(), suggested_array.size());
10338 
    /* android.scaler.availableMinFrameDurations */
    // Per (format, width, height), publish the minimum frame duration in ns.
    // RAW formats index raw_dim/raw_min_duration; all other formats index
    // picture_sizes_tbl/picture_min_duration. Entry layout is
    // (format, width, height, duration) as int64 values.
    Vector<int64_t> available_min_durations;
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
                available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
            }
            break;
        default:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
                available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
            }
            break;
        }
    }
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                      available_min_durations.array(), available_min_durations.size());
10367 
    // Build ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS from
    // the HFR capability table. Each table row maps an HFR mode enum to a
    // frame rate; unknown/off modes leave fps at 0 and are skipped by the
    // MIN_FPS_FOR_BATCH_MODE check below.
    Vector<int32_t> available_hfr_configs;
    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
        int32_t fps = 0;
        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
        case CAM_HFR_MODE_60FPS:
            fps = 60;
            break;
        case CAM_HFR_MODE_90FPS:
            fps = 90;
            break;
        case CAM_HFR_MODE_120FPS:
            fps = 120;
            break;
        case CAM_HFR_MODE_150FPS:
            fps = 150;
            break;
        case CAM_HFR_MODE_180FPS:
            fps = 180;
            break;
        case CAM_HFR_MODE_210FPS:
            fps = 210;
            break;
        case CAM_HFR_MODE_240FPS:
            fps = 240;
            break;
        case CAM_HFR_MODE_480FPS:
            fps = 480;
            break;
        case CAM_HFR_MODE_OFF:
        case CAM_HFR_MODE_MAX:
        default:
            break;
        }

        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
            /* For each HFR frame rate, need to advertise one variable fps range
             * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
             * and [120, 120]. While camcorder preview alone is running [30, 120] is
             * set by the app. When video recording is started, [120, 120] is
             * set. This way sensor configuration does not change when recording
             * is started */

            /* (width, height, fps_min, fps_max, batch_size_max) */
            for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
                j < MAX_SIZES_CNT; j++) {
                // Variable range [PREVIEW_FPS_FOR_HFR, fps] for preview-only.
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
                available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);

                /* (width, height, fps_min, fps_max, batch_size_max) */
                // Fixed range [fps, fps] for active recording.
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
            }
       }
    }
    //Advertise HFR capability only if the property is set
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.hal3hfr.enable", prop, "1");
    uint8_t hfrEnable = (uint8_t)atoi(prop);

    // The array() null check also guards against publishing the tag when no
    // HFR configuration was collected.
    if(hfrEnable && available_hfr_configs.array()) {
        staticInfo.update(
                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
                available_hfr_configs.array(), available_hfr_configs.size());
    }
10443 
    // Worst-case JPEG buffer size for the largest picture dimension.
    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    // Translate each HAL effect enum to its framework counterpart via
    // lookupFwkName(); entries with no framework mapping are dropped.
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    size_t size = 0;
    count = CAM_EFFECT_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                gCamCapability[cameraId]->supported_effects[i]);
        if (NAME_NOT_FOUND != val) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    // Map supported HAL scene modes (excluding OFF) to framework values,
    // remembering the original capability index for the overrides table.
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    size_t supported_scene_modes_cnt = 0;
    count = CAM_SCENE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
                CAM_SCENE_MODE_OFF) {
            int val = lookupFwkName(SCENE_MODES_MAP,
                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
                    gCamCapability[cameraId]->supported_scene_modes[i]);

            if (NAME_NOT_FOUND != val) {
                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
                supported_scene_modes_cnt++;
            }
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    // Per-scene-mode (AE, AWB, AF) override triplets, aligned with
    // avail_scene_modes via supported_indexes.
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      CAM_SCENE_MODE_MAX,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);

    // NOTE(review): when no scene mode is supported, the count is bumped to
    // 1 here AFTER makeOverridesList() ran with cnt == 0, so the overrides
    // update below publishes 3 entries of scene_mode_overrides that
    // makeOverridesList may not have written, and AVAILABLE_SCENE_MODES
    // above was published with 0 entries. Confirm this path is intended.
    if (supported_scene_modes_cnt == 0) {
        supported_scene_modes_cnt = 1;
        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
    }

    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
            scene_mode_overrides, supported_scene_modes_cnt * 3);

    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
                                         ANDROID_CONTROL_MODE_AUTO,
                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
            available_control_modes,
            3);
10509 
    // Antibanding: translate each supported HAL mode to the framework enum,
    // skipping unmapped entries.
    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    count = CAM_ANTIBANDING_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
                gCamCapability[cameraId]->supported_antibandings[i]);
        if (NAME_NOT_FOUND != val) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    // Chromatic-aberration correction: advertise only OFF when the device
    // reports no modes; otherwise advertise all three (OFF/FAST/HQ) as long
    // as at least one is supported, per the Android M requirement.
    uint8_t avail_abberation_modes[] = {
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
    if (0 == count) {
        //  If no aberration correction modes are available for a device, this advertise OFF mode
        size = 1;
    } else {
        // If count is not zero then atleast one among the FAST or HIGH quality is supported
        // So, advertize all 3 modes if atleast any one mode is supported as per the
        // new M requirement
        size = 3;
    }
    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
            avail_abberation_modes,
            size);
10545 
    // Translate HAL focus modes to framework AF modes; entries with no
    // framework equivalent (lookupFwkName returns NAME_NOT_FOUND) are dropped.
    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    count = CAM_FOCUS_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (NAME_NOT_FOUND != val) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    // Same translate-and-filter pattern for white-balance modes.
    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    count = CAM_WB_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                gCamCapability[cameraId]->supported_white_balances[i]);
        if (NAME_NOT_FOUND != val) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);
10578 
    // Flash firing levels are copied through verbatim (no framework mapping).
    // NOTE(review): supported_firing_levels elements are narrowed to uint8_t
    // here — assumed to fit; confirm against the capability struct's type.
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    count = CAM_FLASH_FIRING_LEVEL_MAX;
    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
            count);
    for (size_t i = 0; i < count; i++) {
        available_flash_levels[i] =
                gCamCapability[cameraId]->supported_firing_levels[i];
    }
    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels, count);

    uint8_t flashAvailable;
    if (gCamCapability[cameraId]->flash_available)
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
    else
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    // AE modes: copied from the HAL list, with the HAL's external-flash mode
    // remapped to its framework enum value.
    Vector<uint8_t> avail_ae_modes;
    count = CAM_AE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
        if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
            aeMode = ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH;
        }
        avail_ae_modes.add(aeMode);
    }
    // Flash-dependent AE modes are only advertised when a flash unit exists.
    // (Relies on ANDROID_FLASH_INFO_AVAILABLE_FALSE being 0 so the enum value
    // doubles as a boolean.)
    if (flashAvailable) {
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes.array(),
                      avail_ae_modes.size());
10615 
    // Analog+digital sensitivity (ISO) range reported by the sensor driver.
    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    // Sensor mount angle (clockwise degrees) becomes the framework orientation.
    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    // Framework-mandated order: {RAW, processed (non-stalling), stalling}.
    // NOTE(review): the initializer lists MAX_STALLING_STREAMS first —
    // assumed intentional to match the macro definitions; verify ordering.
    int32_t max_output_streams[] = {
            MAX_STALLING_STREAMS,
            MAX_PROCESSED_STREAMS,
            MAX_RAW_STREAMS};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            max_output_streams,
            sizeof(max_output_streams)/sizeof(max_output_streams[0]));

    // Deliberately published with count 0: no controllable LEDs.
    uint8_t avail_leds = 0;
    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
                      &avail_leds, 0);

    // Focus-distance calibration quality; the tag is omitted entirely when
    // the HAL value has no framework mapping.
    uint8_t focus_dist_calibrated;
    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
            gCamCapability[cameraId]->focus_dist_calibrated);
    if (NAME_NOT_FOUND != val) {
        focus_dist_calibrated = (uint8_t)val;
        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                     &focus_dist_calibrated, 1);
    }
10652 
    // Sensor test-pattern modes, translated to framework enums; unmapped HAL
    // modes are skipped.
    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
    size = 0;
    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
            MAX_TEST_PATTERN_CNT);
    for (size_t i = 0; i < count; i++) {
        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
        if (NAME_NOT_FOUND != testpatternMode) {
            avail_testpattern_modes[size] = testpatternMode;
            size++;
        }
    }
    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
                      avail_testpattern_modes,
                      size);

    // Worst-case request pipeline depth: in-flight requests plus the fixed
    // start-up and frame-skip latencies.
    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
                      &max_pipeline_depth,
                      1);

    // Number of partial metadata results delivered per capture.
    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
                      &partial_result_count,
                       1);

    // Maximum extra frames a reprocess capture may stall the pipeline.
    int32_t max_stall_duration = MAX_REPROCESS_STALL;
    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10681 
    // Build the device capability list. The base set is unconditional; the
    // rest are gated on sensor type / configuration detected earlier.
    Vector<uint8_t> available_capabilities;
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
    if (supportBurst) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
    }
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    // Constrained high-speed video requires HFR enabled AND at least one
    // HFR configuration collected earlier (array() is non-null when non-empty).
    if (hfrEnable && available_hfr_configs.array()) {
        available_capabilities.add(
                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
    }

    // RAW capability for every non-YUV (i.e. Bayer) sensor.
    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
    }
    // Only back camera supports MOTION_TRACKING
    // NOTE(review): camera id 0 is assumed to be the back camera here.
    if (cameraId == 0) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            available_capabilities.array(),
            available_capabilities.size());

    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
    //Assumption is that all bayer cameras support MANUAL_SENSOR.
    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            &aeLockAvailable, 1);

    //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
    //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            &awbLockAvailable, 1);
10724 
    // One reprocess input stream supported at a time.
    int32_t max_input_streams = 1;
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
                      &max_input_streams,
                      1);

    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
    // Three input groups: IMPLEMENTATION_DEFINED -> {BLOB, YCbCr_420_888, Y8},
    // YCbCr_420_888 -> {BLOB, YCbCr_420_888}, and Y8 -> {BLOB, Y8}.
    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 3,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_Y8,
            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
            HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_Y8, 2,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_Y8};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    // The recommended map mirrors the full map — no reduced subset is offered.
    staticInfo.update(ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    // Settings applied on the very next frame (per-frame control).
    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
                      &max_latency,
                      1);

#ifndef USE_HAL_3_3
    // Post-RAW (ISP digital) sensitivity boost range — HAL 3.4+ only.
    int32_t isp_sensitivity_range[2];
    isp_sensitivity_range[0] =
        gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
    isp_sensitivity_range[1] =
        gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
                      isp_sensitivity_range,
                      sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
#endif
10759 
    // Fixed advertised mode lists for the various processing blocks. These
    // are hard-coded rather than derived from capability tables.
    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
            available_hot_pixel_modes,
            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));

    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
                                         ANDROID_SHADING_MODE_FAST,
                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
                      available_shading_modes,
                      3);

    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
                      available_lens_shading_map_modes,
                      2);

    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
                                      ANDROID_EDGE_MODE_FAST,
                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
            available_edge_modes,
            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));

    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            available_noise_red_modes,
            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));

    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
                                         ANDROID_TONEMAP_MODE_FAST,
                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
            available_tonemap_modes,
            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));

    // Hot-pixel map output is not supported; only OFF is advertised.
    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
            available_hot_pixel_map_modes,
            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10807 
    // DNG reference illuminants: published only when the HAL value maps to a
    // framework enum; otherwise the tag is silently left out.
    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant1);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
    }

    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant2);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
    }

    // Color-science matrices copied straight from the capability struct.
    // The casts reinterpret the HAL's rational-matrix storage as the
    // framework's camera_metadata_rational_t layout — assumed layout-compatible.
    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix1,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix2,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform1,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform2,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform1,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform2,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10845 
#ifndef USE_HAL_3_3

    // Session keys (HAL 3.4+): request keys whose change forces an expensive
    // session reconfiguration rather than a per-request update.
    int32_t session_keys[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
        ANDROID_CONTROL_AE_TARGET_FPS_RANGE, QCAMERA3_INSTANT_AEC_MODE, QCAMERA3_USE_AV_TIMER,
        QCAMERA3_VIDEO_HDR_MODE, TANGO_MODE_DATA_SENSOR_FULLFOV};
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_SESSION_KEYS, session_keys,
            sizeof(session_keys) / sizeof(session_keys[0]));

#endif
10855 
    // Baseline set of controllable request keys: standard Android tags first,
    // then QCAMERA3_* vendor tags, then DevCamDebug / NEXUS experimental tags.
    // Conditional keys (AF regions, ZSL) are appended to a Vector below.
    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_JPEG_GPS_COORDINATES,
       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
#ifndef USE_HAL_3_3
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
#endif
       ANDROID_STATISTICS_FACE_DETECT_MODE,
       ANDROID_STATISTICS_SHARPNESS_MAP_MODE, ANDROID_STATISTICS_OIS_DATA_MODE,
       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
       QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
       QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
       QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TEMPORAL_DENOISE_ENABLE,
       QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, QCAMERA3_USE_ISO_EXP_PRIORITY,
       QCAMERA3_SELECT_PRIORITY, QCAMERA3_USE_SATURATION,
       QCAMERA3_EXPOSURE_METER, QCAMERA3_USE_AV_TIMER,
       QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
       QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
       QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
       QCAMERA3_JPEG_ENCODE_CROP_ENABLE, QCAMERA3_JPEG_ENCODE_CROP_RECT,
       QCAMERA3_JPEG_ENCODE_CROP_ROI, QCAMERA3_VIDEO_HDR_MODE,
       QCAMERA3_IR_MODE, QCAMERA3_AEC_CONVERGENCE_SPEED,
       QCAMERA3_AWB_CONVERGENCE_SPEED, QCAMERA3_INSTANT_AEC_MODE,
       QCAMERA3_SHARPNESS_STRENGTH, QCAMERA3_HISTOGRAM_MODE,
       QCAMERA3_BINNING_CORRECTION_MODE,
       /* DevCamDebug metadata request_keys_basic */
       DEVCAMDEBUG_META_ENABLE,
       /* DevCamDebug metadata end */
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
       TANGO_MODE_DATA_SENSOR_FULLFOV,
       NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
       NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
       NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE,
       NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE,
       };

    size_t request_keys_cnt =
            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
    Vector<int32_t> available_request_keys;
    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
    // AF regions are only meaningful when more than one focus mode exists
    // (i.e. the device is not fixed-focus).
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }

    // ZSL-related request keys are gated by the global expose flag.
    if (gExposeEnableZslKey) {
        available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
        available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
        available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
        available_request_keys.add(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
            available_request_keys.array(), available_request_keys.size());
10934 
    // Baseline set of result (capture-result) keys. Layout mirrors
    // request_keys_basic: standard tags, QCAMERA3_* vendor tags, DevCamDebug
    // tags (AF / AEC / zzHDR / ADRC / motion / AWB groups), then NEXUS
    // experimental tags. Conditional keys are appended via a Vector below.
    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
       ANDROID_CONTROL_AF_SCENE_CHANGE,
       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_STATISTICS_FACE_DETECT_MODE,
       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
       ANDROID_STATISTICS_FACE_SCORES, ANDROID_STATISTICS_OIS_DATA_MODE,
       ANDROID_STATISTICS_OIS_TIMESTAMPS, ANDROID_STATISTICS_OIS_X_SHIFTS,
       ANDROID_STATISTICS_OIS_Y_SHIFTS,
#ifndef USE_HAL_3_3
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
#endif
       NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
       NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
       QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
       QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
       QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TUNING_META_DATA_BLOB,
       QCAMERA3_TEMPORAL_DENOISE_ENABLE, QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE,
       QCAMERA3_EXPOSURE_METER, QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN,
       QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
       QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
       QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB, QCAMERA3_VIDEO_HDR_MODE,
       QCAMERA3_IR_MODE, QCAMERA3_AEC_CONVERGENCE_SPEED,
       QCAMERA3_AWB_CONVERGENCE_SPEED, QCAMERA3_INSTANT_AEC_MODE,
       QCAMERA3_HISTOGRAM_MODE, QCAMERA3_BINNING_CORRECTION_MODE,
       QCAMERA3_STATS_IS_HDR_SCENE, QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
       QCAMERA3_STATS_BLINK_DETECTED, QCAMERA3_STATS_BLINK_DEGREE,
       QCAMERA3_STATS_SMILE_DEGREE, QCAMERA3_STATS_SMILE_CONFIDENCE,
       QCAMERA3_STATS_GAZE_ANGLE, QCAMERA3_STATS_GAZE_DIRECTION,
       QCAMERA3_STATS_GAZE_DEGREE,
       // DevCamDebug metadata result_keys_basic
       DEVCAMDEBUG_META_ENABLE,
       // DevCamDebug metadata result_keys AF
       DEVCAMDEBUG_AF_LENS_POSITION,
       DEVCAMDEBUG_AF_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_TOF_DISTANCE,
       DEVCAMDEBUG_AF_LUMA,
       DEVCAMDEBUG_AF_HAF_STATE,
       DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
       DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
       DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
       DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
       DEVCAMDEBUG_AF_MONITOR_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
       DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
       DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
       // DevCamDebug metadata result_keys AEC
       DEVCAMDEBUG_AEC_TARGET_LUMA,
       DEVCAMDEBUG_AEC_COMP_LUMA,
       DEVCAMDEBUG_AEC_AVG_LUMA,
       DEVCAMDEBUG_AEC_CUR_LUMA,
       DEVCAMDEBUG_AEC_LINECOUNT,
       DEVCAMDEBUG_AEC_REAL_GAIN,
       DEVCAMDEBUG_AEC_EXP_INDEX,
       DEVCAMDEBUG_AEC_LUX_IDX,
       // DevCamDebug metadata result_keys zzHDR
       DEVCAMDEBUG_AEC_L_REAL_GAIN,
       DEVCAMDEBUG_AEC_L_LINECOUNT,
       DEVCAMDEBUG_AEC_S_REAL_GAIN,
       DEVCAMDEBUG_AEC_S_LINECOUNT,
       DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
       DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
       // DevCamDebug metadata result_keys ADRC
       DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
       DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
       DEVCAMDEBUG_AEC_GTM_RATIO,
       DEVCAMDEBUG_AEC_LTM_RATIO,
       DEVCAMDEBUG_AEC_LA_RATIO,
       DEVCAMDEBUG_AEC_GAMMA_RATIO,
       // DevCamDebug metadata result_keys AEC MOTION
       DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
       DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
       DEVCAMDEBUG_AEC_SUBJECT_MOTION,
       // DevCamDebug metadata result_keys AWB
       DEVCAMDEBUG_AWB_R_GAIN,
       DEVCAMDEBUG_AWB_G_GAIN,
       DEVCAMDEBUG_AWB_B_GAIN,
       DEVCAMDEBUG_AWB_CCT,
       DEVCAMDEBUG_AWB_DECISION,
       /* DevCamDebug metadata end */
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
       NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
       NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
       NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
       NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
       NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE,
       NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
       NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
       NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
       NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE,
       NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_X,
       NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_Y,
       NEXUS_EXPERIMENTAL_2017_SUBJECT_MOTION
       };
11066 
    size_t result_keys_cnt =
            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);

    // Start from the static baseline, then append capability-dependent keys.
    Vector<int32_t> available_result_keys;
    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
    // AF regions only reported on devices with more than one focus mode
    // (not fixed-focus).
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }
    // RAW (Bayer) sensors additionally report noise profile and green split.
    if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
    }
    // Face-detect results scale with the supported detect mode: SIMPLE (1)
    // gives rectangles+scores; FULL-class modes (2, 3) add ids+landmarks.
    if (supportedFaceDetectMode == 1) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
    } else if ((supportedFaceDetectMode == 2) ||
            (supportedFaceDetectMode == 3)) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
    }
#ifndef USE_HAL_3_3
    // Dynamic black/white level results are HAL 3.4+ only.
    {
        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
    }
#endif

    // ZSL-related result keys mirror the request-key gating above.
    if (gExposeEnableZslKey) {
        available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
        available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
        available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
        available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
            available_result_keys.array(), available_result_keys.size());
11103 
11104     int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
11105        ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
11106        ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
11107        ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
11108        ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
11109        ANDROID_SCALER_CROPPING_TYPE,
11110        ANDROID_SYNC_MAX_LATENCY,
11111        ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
11112        ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
11113        ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
11114        ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
11115        ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
11116        ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
11117        ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
11118        ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
11119        ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
11120        ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
11121        ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
11122        ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
11123        ANDROID_LENS_FACING,
11124        ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
11125        ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
11126        ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
11127        ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
11128        ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
11129        ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
11130        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
11131        /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
11132        ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
11133        ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
11134        ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
11135        ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
11136        ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
11137        ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
11138        ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
11139        ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
11140        ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
11141        ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
11142        ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
11143        ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
11144        ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
11145        ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
11146        ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
11147        ANDROID_EDGE_AVAILABLE_EDGE_MODES,
11148        ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
11149        ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
11150        ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
11151        ANDROID_TONEMAP_MAX_CURVE_POINTS,
11152        ANDROID_CONTROL_AVAILABLE_MODES,
11153        ANDROID_CONTROL_AE_LOCK_AVAILABLE,
11154        ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
11155        ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
11156        ANDROID_SHADING_AVAILABLE_MODES,
11157        ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
11158 #ifndef USE_HAL_3_3
11159        ANDROID_SENSOR_OPAQUE_RAW_SIZE,
11160        ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
11161 #endif
11162        ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS,
11163        ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP,
11164        QCAMERA3_OPAQUE_RAW_FORMAT, QCAMERA3_EXP_TIME_RANGE,
11165        QCAMERA3_SATURATION_RANGE, QCAMERA3_SENSOR_IS_MONO_ONLY,
11166        QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
11167        QCAMERA3_SHARPNESS_RANGE,
11168        QCAMERA3_HISTOGRAM_BUCKETS, QCAMERA3_HISTOGRAM_MAX_COUNT,
11169        QCAMERA3_STATS_BSGC_AVAILABLE
11170        };
11171 
11172     available_characteristics_keys.appendArray(characteristics_keys_basic,
11173             sizeof(characteristics_keys_basic)/sizeof(int32_t));
11174 #ifndef USE_HAL_3_3
11175     if (hasBlackRegions) {
11176         available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
11177     }
11178 #endif
11179 
11180     if (cameraId == 0) {
11181         int32_t lensCalibrationKeys[] = {
11182             ANDROID_LENS_POSE_ROTATION,
11183             ANDROID_LENS_POSE_TRANSLATION,
11184             ANDROID_LENS_POSE_REFERENCE,
11185             ANDROID_LENS_INTRINSIC_CALIBRATION,
11186             ANDROID_LENS_DISTORTION,
11187         };
11188         available_characteristics_keys.appendArray(lensCalibrationKeys,
11189                 sizeof(lensCalibrationKeys) / sizeof(lensCalibrationKeys[0]));
11190     }
11191 
11192     if (0 <= indexPD) {
11193         int32_t depthKeys[] = {
11194                 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
11195                 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
11196                 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
11197                 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
11198                 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
11199         };
11200         available_characteristics_keys.appendArray(depthKeys,
11201                 sizeof(depthKeys) / sizeof(depthKeys[0]));
11202     }
11203 
11204     /*available stall durations depend on the hw + sw and will be different for different devices */
11205     /*have to add for raw after implementation*/
11206     int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
11207     size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
11208 
11209     Vector<int64_t> available_stall_durations;
11210     for (uint32_t j = 0; j < stall_formats_count; j++) {
11211         if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
11212             for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
11213                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
11214                 available_stall_durations.add(stall_formats[j]);
11215                 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
11216                 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
11217                 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
11218           }
11219         } else {
11220             for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
11221                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
11222                 available_stall_durations.add(stall_formats[j]);
11223                 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
11224                 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
11225                 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
11226             }
11227         }
11228     }
11229     staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
11230                       available_stall_durations.array(),
11231                       available_stall_durations.size());
11232 
11233     //QCAMERA3_OPAQUE_RAW
11234     uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
11235     cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
11236     switch (gCamCapability[cameraId]->opaque_raw_fmt) {
11237     case LEGACY_RAW:
11238         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
11239             fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
11240         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
11241             fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
11242         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
11243             fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
11244         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
11245         break;
11246     case MIPI_RAW:
11247         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
11248             fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
11249         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
11250             fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
11251         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
11252             fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
11253         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
11254         break;
11255     default:
11256         LOGE("unknown opaque_raw_format %d",
11257                 gCamCapability[cameraId]->opaque_raw_fmt);
11258         break;
11259     }
11260     staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
11261 
11262     Vector<int32_t> strides;
11263     for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
11264             gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
11265         cam_stream_buf_plane_info_t buf_planes;
11266         strides.add(gCamCapability[cameraId]->raw_dim[i].width);
11267         strides.add(gCamCapability[cameraId]->raw_dim[i].height);
11268         cam_stream_info_t info = {.fmt = fmt};
11269         mm_stream_calc_offset_raw(&info, &gCamCapability[cameraId]->raw_dim[i],
11270             &gCamCapability[cameraId]->padding_info, &buf_planes);
11271         strides.add(buf_planes.plane_info.mp[0].stride);
11272     }
11273 
11274     if (!strides.isEmpty()) {
11275         staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
11276                 strides.size());
11277         available_characteristics_keys.add(QCAMERA3_OPAQUE_RAW_STRIDES);
11278     }
11279 
11280     //TBD: remove the following line once backend advertises zzHDR in feature mask
11281     gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
11282     //Video HDR default
11283     if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
11284             (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
11285             CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
11286         int32_t vhdr_mode[] = {
11287                 QCAMERA3_VIDEO_HDR_MODE_OFF,
11288                 QCAMERA3_VIDEO_HDR_MODE_ON};
11289 
11290         size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
11291         staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
11292                     vhdr_mode, vhdr_mode_count);
11293         available_characteristics_keys.add(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES);
11294     }
11295 
11296     staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
11297             (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
11298             sizeof(gCamCapability[cameraId]->related_cam_calibration));
11299 
11300     uint8_t isMonoOnly =
11301             (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
11302     staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
11303             &isMonoOnly, 1);
11304 
11305 #ifndef USE_HAL_3_3
11306     Vector<int32_t> opaque_size;
11307     for (size_t j = 0; j < scalar_formats_count; j++) {
11308         if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
11309             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
11310                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
11311                 cam_stream_buf_plane_info_t buf_planes;
11312                 cam_stream_info_t info = {.fmt = fmt};
11313                 rc = mm_stream_calc_offset_raw(&info, &gCamCapability[cameraId]->raw_dim[i],
11314                          &gCamCapability[cameraId]->padding_info, &buf_planes);
11315 
11316                 if (rc == 0) {
11317                     opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
11318                     opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
11319                     opaque_size.add(buf_planes.plane_info.frame_len);
11320                 }else {
11321                     LOGE("raw frame calculation failed!");
11322                 }
11323             }
11324         }
11325     }
11326 
11327     if ((opaque_size.size() > 0) &&
11328             (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
11329         staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
11330     else
11331         LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
11332 #endif
11333 
11334     if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
11335         int32_t avail_ir_modes[CAM_IR_MODE_MAX];
11336         size = 0;
11337         count = CAM_IR_MODE_MAX;
11338         count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
11339         for (size_t i = 0; i < count; i++) {
11340             int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
11341                     gCamCapability[cameraId]->supported_ir_modes[i]);
11342             if (NAME_NOT_FOUND != val) {
11343                 avail_ir_modes[size] = (int32_t)val;
11344                 size++;
11345             }
11346         }
11347         staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
11348                 avail_ir_modes, size);
11349         available_characteristics_keys.add(QCAMERA3_IR_AVAILABLE_MODES);
11350     }
11351 
11352     if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
11353         uint8_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
11354         size = 0;
11355         count = CAM_AEC_CONVERGENCE_MAX;
11356         count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
11357         for (size_t i = 0; i < count; i++) {
11358             int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
11359                     gCamCapability[cameraId]->supported_instant_aec_modes[i]);
11360             if (NAME_NOT_FOUND != val) {
11361                 available_instant_aec_modes[size] = (uint8_t)val;
11362                 size++;
11363             }
11364         }
11365         staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
11366                 available_instant_aec_modes, size);
11367         available_characteristics_keys.add(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES);
11368     }
11369 
11370     int32_t sharpness_range[] = {
11371             gCamCapability[cameraId]->sharpness_ctrl.min_value,
11372             gCamCapability[cameraId]->sharpness_ctrl.max_value};
11373     staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
11374 
11375     if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
11376         int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
11377         size = 0;
11378         count = CAM_BINNING_CORRECTION_MODE_MAX;
11379         count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
11380         for (size_t i = 0; i < count; i++) {
11381             int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
11382                     METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
11383                     gCamCapability[cameraId]->supported_binning_modes[i]);
11384             if (NAME_NOT_FOUND != val) {
11385                 avail_binning_modes[size] = (int32_t)val;
11386                 size++;
11387             }
11388         }
11389         staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
11390                 avail_binning_modes, size);
11391         available_characteristics_keys.add(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES);
11392     }
11393 
11394     if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
11395         int32_t available_aec_modes[CAM_AEC_MODE_MAX];
11396         size = 0;
11397         count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
11398         for (size_t i = 0; i < count; i++) {
11399             int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
11400                     gCamCapability[cameraId]->supported_aec_modes[i]);
11401             if (NAME_NOT_FOUND != val)
11402                 available_aec_modes[size++] = val;
11403         }
11404         staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
11405                 available_aec_modes, size);
11406         available_characteristics_keys.add(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES);
11407     }
11408 
11409     if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
11410         int32_t available_iso_modes[CAM_ISO_MODE_MAX];
11411         size = 0;
11412         count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
11413         for (size_t i = 0; i < count; i++) {
11414             int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
11415                     gCamCapability[cameraId]->supported_iso_modes[i]);
11416             if (NAME_NOT_FOUND != val)
11417                 available_iso_modes[size++] = val;
11418         }
11419         staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
11420                 available_iso_modes, size);
11421         available_characteristics_keys.add(QCAMERA3_ISO_AVAILABLE_MODES);
11422     }
11423 
11424     int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
11425     for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
11426         available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
11427     staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
11428             available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
11429 
11430     int32_t available_saturation_range[4];
11431     available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
11432     available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
11433     available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
11434     available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
11435     staticInfo.update(QCAMERA3_SATURATION_RANGE,
11436             available_saturation_range, 4);
11437 
11438     uint8_t is_hdr_values[2];
11439     is_hdr_values[0] = 0;
11440     is_hdr_values[1] = 1;
11441     staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
11442             is_hdr_values, 2);
11443 
11444     float is_hdr_confidence_range[2];
11445     is_hdr_confidence_range[0] = 0.0;
11446     is_hdr_confidence_range[1] = 1.0;
11447     staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
11448             is_hdr_confidence_range, 2);
11449 
11450     size_t eepromLength = strnlen(
11451             reinterpret_cast<const char *>(
11452                     gCamCapability[cameraId]->eeprom_version_info),
11453             sizeof(gCamCapability[cameraId]->eeprom_version_info));
11454     if (0 < eepromLength) {
11455         char easelInfo[] = ",E:N";
11456         char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
11457         if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
11458             eepromLength += sizeof(easelInfo);
11459             strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
11460                     gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-Y" : ",E:N"),
11461                     MAX_EEPROM_VERSION_INFO_LEN);
11462         }
11463         staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
11464                 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
11465         available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO);
11466 
11467         staticInfo.update(ANDROID_INFO_VERSION,
11468                 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
11469         available_characteristics_keys.add(ANDROID_INFO_VERSION);
11470     }
11471 
11472     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
11473                       available_characteristics_keys.array(),
11474                       available_characteristics_keys.size());
11475 
11476     std::vector<uint8_t> availableOisModes;
11477     availableOisModes.push_back(ANDROID_STATISTICS_OIS_DATA_MODE_OFF);
11478     if (cameraId == 0) {
11479         availableOisModes.push_back(ANDROID_STATISTICS_OIS_DATA_MODE_ON);
11480     }
11481 
11482     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES,
11483                       availableOisModes.data(),
11484                       availableOisModes.size());
11485 
11486     gStaticMetadata[cameraId] = staticInfo.release();
11487     return rc;
11488 }
11489 
11490 /*===========================================================================
11491  * FUNCTION   : makeTable
11492  *
11493  * DESCRIPTION: make a table of sizes
11494  *
11495  * PARAMETERS :
11496  *
11497  *
11498  *==========================================================================*/
makeTable(cam_dimension_t * dimTable,size_t size,size_t max_size,int32_t * sizeTable)11499 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
11500         size_t max_size, int32_t *sizeTable)
11501 {
11502     size_t j = 0;
11503     if (size > max_size) {
11504        size = max_size;
11505     }
11506     for (size_t i = 0; i < size; i++) {
11507         sizeTable[j] = dimTable[i].width;
11508         sizeTable[j+1] = dimTable[i].height;
11509         j+=2;
11510     }
11511 }
11512 
11513 /*===========================================================================
11514  * FUNCTION   : makeFPSTable
11515  *
11516  * DESCRIPTION: make a table of fps ranges
11517  *
11518  * PARAMETERS :
11519  *
11520  *==========================================================================*/
makeFPSTable(cam_fps_range_t * fpsTable,size_t size,size_t max_size,int32_t * fpsRangesTable)11521 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
11522         size_t max_size, int32_t *fpsRangesTable)
11523 {
11524     size_t j = 0;
11525     if (size > max_size) {
11526        size = max_size;
11527     }
11528     for (size_t i = 0; i < size; i++) {
11529         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
11530         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
11531         j+=2;
11532     }
11533 }
11534 
11535 /*===========================================================================
11536  * FUNCTION   : makeOverridesList
11537  *
11538  * DESCRIPTION: make a list of scene mode overrides
11539  *
11540  * PARAMETERS :
11541  *
11542  *
11543  *==========================================================================*/
makeOverridesList(cam_scene_mode_overrides_t * overridesTable,size_t size,size_t max_size,uint8_t * overridesList,uint8_t * supported_indexes,uint32_t camera_id)11544 void QCamera3HardwareInterface::makeOverridesList(
11545         cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
11546         uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
11547 {
11548     /*daemon will give a list of overrides for all scene modes.
11549       However we should send the fwk only the overrides for the scene modes
11550       supported by the framework*/
11551     size_t j = 0;
11552     if (size > max_size) {
11553        size = max_size;
11554     }
11555     size_t focus_count = CAM_FOCUS_MODE_MAX;
11556     focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
11557             focus_count);
11558     for (size_t i = 0; i < size; i++) {
11559         bool supt = false;
11560         size_t index = supported_indexes[i];
11561         overridesList[j] = gCamCapability[camera_id]->flash_available ?
11562                 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
11563         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
11564                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11565                 overridesTable[index].awb_mode);
11566         if (NAME_NOT_FOUND != val) {
11567             overridesList[j+1] = (uint8_t)val;
11568         }
11569         uint8_t focus_override = overridesTable[index].af_mode;
11570         for (size_t k = 0; k < focus_count; k++) {
11571            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
11572               supt = true;
11573               break;
11574            }
11575         }
11576         if (supt) {
11577             val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11578                     focus_override);
11579             if (NAME_NOT_FOUND != val) {
11580                 overridesList[j+2] = (uint8_t)val;
11581             }
11582         } else {
11583            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
11584         }
11585         j+=3;
11586     }
11587 }
11588 
11589 /*===========================================================================
11590  * FUNCTION   : filterJpegSizes
11591  *
11592  * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
11593  *              could be downscaled to
11594  *
11595  * PARAMETERS :
11596  *
11597  * RETURN     : length of jpegSizes array
11598  *==========================================================================*/
11599 
filterJpegSizes(int32_t * jpegSizes,int32_t * processedSizes,size_t processedSizesCnt,size_t maxCount,cam_rect_t active_array_size,uint8_t downscale_factor)11600 size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
11601         size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
11602         uint8_t downscale_factor)
11603 {
11604     if (0 == downscale_factor) {
11605         downscale_factor = 1;
11606     }
11607 
11608     int32_t min_width = active_array_size.width / downscale_factor;
11609     int32_t min_height = active_array_size.height / downscale_factor;
11610     size_t jpegSizesCnt = 0;
11611     if (processedSizesCnt > maxCount) {
11612         processedSizesCnt = maxCount;
11613     }
11614     for (size_t i = 0; i < processedSizesCnt; i+=2) {
11615         if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
11616             jpegSizes[jpegSizesCnt] = processedSizes[i];
11617             jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
11618             jpegSizesCnt += 2;
11619         }
11620     }
11621     return jpegSizesCnt;
11622 }
11623 
11624 /*===========================================================================
11625  * FUNCTION   : computeNoiseModelEntryS
11626  *
11627  * DESCRIPTION: function to map a given sensitivity to the S noise
11628  *              model parameters in the DNG noise model.
11629  *
11630  * PARAMETERS : sens : the sensor sensitivity
11631  *
 * RETURN    : S (sensor amplification) noise
11633  *
11634  *==========================================================================*/
computeNoiseModelEntryS(int32_t sens)11635 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
11636     double s = gCamCapability[mCameraId]->gradient_S * sens +
11637             gCamCapability[mCameraId]->offset_S;
11638     return ((s < 0.0) ? 0.0 : s);
11639 }
11640 
11641 /*===========================================================================
11642  * FUNCTION   : computeNoiseModelEntryO
11643  *
11644  * DESCRIPTION: function to map a given sensitivity to the O noise
11645  *              model parameters in the DNG noise model.
11646  *
11647  * PARAMETERS : sens : the sensor sensitivity
11648  *
 * RETURN    : O (sensor readout) noise
11650  *
11651  *==========================================================================*/
computeNoiseModelEntryO(int32_t sens)11652 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
11653     int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
11654     double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
11655             1.0 : (1.0 * sens / max_analog_sens);
11656     double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
11657             gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
11658     return ((o < 0.0) ? 0.0 : o);
11659 }
11660 
11661 /*===========================================================================
11662  * FUNCTION   : getSensorSensitivity
11663  *
11664  * DESCRIPTION: convert iso_mode to an integer value
11665  *
11666  * PARAMETERS : iso_mode : the iso_mode supported by sensor
11667  *
 * RETURN    : sensitivity supported by sensor
11669  *
11670  *==========================================================================*/
getSensorSensitivity(int32_t iso_mode)11671 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
11672 {
11673     int32_t sensitivity;
11674 
11675     switch (iso_mode) {
11676     case CAM_ISO_MODE_100:
11677         sensitivity = 100;
11678         break;
11679     case CAM_ISO_MODE_200:
11680         sensitivity = 200;
11681         break;
11682     case CAM_ISO_MODE_400:
11683         sensitivity = 400;
11684         break;
11685     case CAM_ISO_MODE_800:
11686         sensitivity = 800;
11687         break;
11688     case CAM_ISO_MODE_1600:
11689         sensitivity = 1600;
11690         break;
11691     default:
11692         sensitivity = -1;
11693         break;
11694     }
11695     return sensitivity;
11696 }
11697 
// Lazily creates the global Easel manager client and, if Easel hardware is
// present and not yet opened, powers it on and immediately suspends it while
// reading the HDR+ related properties. Callers hold gHdrPlusClientLock (the
// "Locked" suffix — assumed convention, confirm at call sites).
// Returns OK on success (including when Easel is absent or intentionally left
// powered off), -ENODEV if the manager client cannot be created, or the error
// from EaselManagerClient::open().
int QCamera3HardwareInterface::initHdrPlusClientLocked() {
    if (gEaselManagerClient == nullptr) {
        gEaselManagerClient = EaselManagerClient::create();
        if (gEaselManagerClient == nullptr) {
            ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
            return -ENODEV;
        }
    }

    if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
        // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
        // to connect to Easel directly instead.
        bool doNotpowerOnEasel =
                property_get_bool("camera.hdrplus.donotpoweroneasel", false);

        if (doNotpowerOnEasel) {
            ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
            return OK;
        }

        // If Easel is present, power on Easel and suspend it immediately.
        status_t res = gEaselManagerClient->open();
        if (res != OK) {
            ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res),
                    res);
            return res;
        }

        // Mark opened before suspending so a failed suspend is not retried
        // with another open() on the next call.
        EaselManagerClientOpened = true;

        res = gEaselManagerClient->suspend();
        if (res != OK) {
            // Non-fatal: Easel stays powered; continue with initialization.
            ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
        }

        gEaselBypassOnly = property_get_bool("persist.camera.hdrplus.disable", false);
        gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);

        // Expose enableZsl key only when HDR+ mode is enabled.
        gExposeEnableZslKey = !gEaselBypassOnly;
    }

    return OK;
}
11742 
11743 /*===========================================================================
11744  * FUNCTION   : isStreamCombinationSupported
11745  *
11746  * DESCRIPTION: query camera support for specific stream combination
11747  *
11748  * PARAMETERS :
11749  *   @cameraId  : camera Id
11750  *   @comb      : stream combination
11751  *
11752  * RETURN     : int type of status
11753  *              NO_ERROR  -- in case combination is supported
11754  *              none-zero failure code
11755  *==========================================================================*/
isStreamCombinationSupported(uint32_t cameraId,const camera_stream_combination_t * comb)11756 int QCamera3HardwareInterface::isStreamCombinationSupported(uint32_t cameraId,
11757         const camera_stream_combination_t *comb)
11758 {
11759     int rc = BAD_VALUE;
11760     pthread_mutex_lock(&gCamLock);
11761 
11762     if (NULL == gCamCapability[cameraId]) {
11763         rc = initCapabilities(cameraId);
11764         if (rc < 0) {
11765             pthread_mutex_unlock(&gCamLock);
11766             return rc;
11767         }
11768     }
11769 
11770     camera3_stream_configuration_t streamList = {comb->num_streams, /*streams*/ nullptr,
11771             comb->operation_mode, /*session_parameters*/ nullptr};
11772     streamList.streams = new camera3_stream_t * [comb->num_streams];
11773     camera3_stream_t *streamBuffer = new camera3_stream_t[comb->num_streams];
11774     for (size_t i = 0; i < comb->num_streams; i++) {
11775         streamBuffer[i] = {comb->streams[i].stream_type, comb->streams[i].width,
11776             comb->streams[i].height, comb->streams[i].format, comb->streams[i].usage,
11777             /*max_buffers*/ 0, /*priv*/ nullptr, comb->streams[i].data_space,
11778             comb->streams[i].rotation, comb->streams[i].physical_camera_id, /*reserved*/ {nullptr}};
11779         streamList.streams[i] = &streamBuffer[i];
11780     }
11781 
11782     StreamValidateStatus validateStatus;
11783     rc = validateStreamCombination(cameraId, &streamList, &validateStatus);
11784 
11785     delete [] streamBuffer;
11786     delete [] streamList.streams;
11787     pthread_mutex_unlock(&gCamLock);
11788 
11789     return rc;
11790 }
11791 
/*===========================================================================
 * FUNCTION   : getCamInfo
 *
 * DESCRIPTION: query camera capabilities
 *
 * PARAMETERS :
 *   @cameraId  : camera Id
 *   @info      : camera info struct to be filled in with camera capabilities
 *
 * RETURN     : int type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
    int rc = 0;

    pthread_mutex_lock(&gCamLock);

    // NOTE(review): gHdrPlusClientLock is acquired while gCamLock is already
    // held. This nesting order must match every other site that takes both
    // locks or a deadlock becomes possible — confirm against the other users.
    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        rc = initHdrPlusClientLocked();
        if (rc != OK) {
            ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Capabilities and static metadata are queried lazily and cached in the
    // process-wide gCamCapability/gStaticMetadata tables, so only the first
    // call for a given camera id pays the probe cost.
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Collapse the vendor position enum (incl. aux sensors) onto the two
    // framework-visible facings. An unknown position still fills the rest of
    // the info struct but reports failure to the caller.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
    case CAM_POSITION_BACK_AUX:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
    case CAM_POSITION_FRONT_AUX:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        LOGE("Unknown position type %d for camera id:%d",
                gCamCapability[cameraId]->position, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    // Device version is chosen at build time: 3.5 unless the legacy
    // USE_HAL_3_3 build flag is set.
#ifndef USE_HAL_3_3
    info->device_version = CAMERA_DEVICE_API_VERSION_3_5;
#else
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // m is approximated as MAX_PROCESSED_STREAMS full-active-array streams at
    // the highest advertised fps; max_pixel_bandwidth is the CPP limit M.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
11890 
11891 /*===========================================================================
11892  * FUNCTION   : translateCapabilityToMetadata
11893  *
11894  * DESCRIPTION: translate the capability into camera_metadata_t
11895  *
11896  * PARAMETERS : type of the request
11897  *
11898  *
11899  * RETURN     : success: camera_metadata_t*
11900  *              failure: NULL
11901  *
11902  *==========================================================================*/
translateCapabilityToMetadata(int type)11903 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11904 {
11905     if (mDefaultMetadata[type] != NULL) {
11906         return mDefaultMetadata[type];
11907     }
11908     //first time we are handling this request
11909     //fill up the metadata structure using the wrapper class
11910     CameraMetadata settings;
11911     //translate from cam_capability_t to camera_metadata_tag_t
11912     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11913     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11914     int32_t defaultRequestID = 0;
11915     settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11916 
11917     /* OIS disable */
11918     char ois_prop[PROPERTY_VALUE_MAX];
11919     memset(ois_prop, 0, sizeof(ois_prop));
11920     property_get("persist.camera.ois.disable", ois_prop, "0");
11921     uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11922 
11923     /* Force video to use OIS */
11924     char videoOisProp[PROPERTY_VALUE_MAX];
11925     memset(videoOisProp, 0, sizeof(videoOisProp));
11926     property_get("persist.camera.ois.video", videoOisProp, "1");
11927     uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
11928 
11929     // Hybrid AE enable/disable
11930     char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11931     memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11932     property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
11933     uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
11934 
11935     uint8_t controlIntent = 0;
11936     uint8_t focusMode;
11937     uint8_t vsMode;
11938     uint8_t optStabMode;
11939     uint8_t cacMode;
11940     uint8_t edge_mode;
11941     uint8_t noise_red_mode;
11942     uint8_t shading_mode;
11943     uint8_t hot_pixel_mode;
11944     uint8_t tonemap_mode;
11945     bool highQualityModeEntryAvailable = FALSE;
11946     bool fastModeEntryAvailable = FALSE;
11947     uint8_t histogramEnable = false;
11948     vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11949     optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11950     uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
11951     uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
11952     uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
11953 
11954     switch (type) {
11955       case CAMERA3_TEMPLATE_PREVIEW:
11956         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11957         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11958         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11959         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11960         edge_mode = ANDROID_EDGE_MODE_FAST;
11961         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11962         shading_mode = ANDROID_SHADING_MODE_FAST;
11963         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
11964         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11965         break;
11966       case CAMERA3_TEMPLATE_STILL_CAPTURE:
11967         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11968         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11969         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11970         edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11971         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11972         shading_mode = ANDROID_SHADING_MODE_HIGH_QUALITY;
11973         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
11974         tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11975         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11976         // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11977         for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11978             if (gCamCapability[mCameraId]->aberration_modes[i] ==
11979                     CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11980                 highQualityModeEntryAvailable = TRUE;
11981             } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11982                     CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11983                 fastModeEntryAvailable = TRUE;
11984             }
11985         }
11986         if (highQualityModeEntryAvailable) {
11987             cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11988         } else if (fastModeEntryAvailable) {
11989             cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11990         }
11991         if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11992             shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11993         }
11994         enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
11995         break;
11996       case CAMERA3_TEMPLATE_VIDEO_RECORD:
11997         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11998         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11999         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
12000         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
12001         edge_mode = ANDROID_EDGE_MODE_FAST;
12002         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
12003         shading_mode = ANDROID_SHADING_MODE_FAST;
12004         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
12005         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
12006         if (forceVideoOis)
12007             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
12008         break;
12009       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
12010         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
12011         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
12012         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
12013         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
12014         edge_mode = ANDROID_EDGE_MODE_FAST;
12015         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
12016         shading_mode = ANDROID_SHADING_MODE_FAST;
12017         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
12018         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
12019         if (forceVideoOis)
12020             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
12021         break;
12022       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
12023         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
12024         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
12025         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
12026         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
12027         edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
12028         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
12029         shading_mode = ANDROID_SHADING_MODE_FAST;
12030         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
12031         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
12032         break;
12033       case CAMERA3_TEMPLATE_MANUAL:
12034         edge_mode = ANDROID_EDGE_MODE_FAST;
12035         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
12036         shading_mode = ANDROID_SHADING_MODE_FAST;
12037         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
12038         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
12039         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
12040         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
12041         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
12042         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
12043         break;
12044       default:
12045         edge_mode = ANDROID_EDGE_MODE_FAST;
12046         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
12047         shading_mode = ANDROID_SHADING_MODE_FAST;
12048         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
12049         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
12050         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
12051         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
12052         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
12053         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
12054         break;
12055     }
12056     // Set CAC to OFF if underlying device doesn't support
12057     if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12058         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
12059     }
12060     settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
12061     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
12062     settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
12063     if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
12064         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
12065     }
12066     settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
12067     settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
12068     settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
12069 
12070     if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
12071             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
12072         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
12073     else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
12074             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
12075             || ois_disable)
12076         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
12077     settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
12078     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
12079 
12080     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
12081             &gCamCapability[mCameraId]->exposure_compensation_default, 1);
12082 
12083     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
12084     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
12085 
12086     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
12087     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
12088 
12089     static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
12090     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
12091 
12092     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
12093     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
12094 
12095     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
12096     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
12097 
12098     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
12099     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
12100 
12101     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
12102     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
12103 
12104     /*flash*/
12105     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
12106     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
12107 
12108     static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
12109     settings.update(ANDROID_FLASH_FIRING_POWER,
12110             &flashFiringLevel, 1);
12111 
12112     /* lens */
12113     float default_aperture = gCamCapability[mCameraId]->apertures[0];
12114     settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
12115 
12116     if (gCamCapability[mCameraId]->filter_densities_count) {
12117         float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
12118         settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
12119                         gCamCapability[mCameraId]->filter_densities_count);
12120     }
12121 
12122     float default_focal_length = gCamCapability[mCameraId]->focal_length;
12123     settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
12124 
12125     static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
12126     settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
12127 
12128     static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
12129     settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
12130 
12131     /* face detection (default to OFF) */
12132     static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
12133     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
12134 
12135     static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
12136     settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
12137 
12138     static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
12139     settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
12140 
12141     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
12142     settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
12143 
12144 
12145     static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
12146     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
12147 
12148     /* Exposure time(Update the Min Exposure Time)*/
12149     int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
12150     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
12151 
12152     /* frame duration */
12153     static const int64_t default_frame_duration = NSEC_PER_33MSEC;
12154     settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
12155 
12156     /* sensitivity */
12157     static const int32_t default_sensitivity = 100;
12158     settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
12159 #ifndef USE_HAL_3_3
12160     static const int32_t default_isp_sensitivity =
12161             gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12162     settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
12163 #endif
12164 
12165     /*edge mode*/
12166     settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
12167 
12168     /*noise reduction mode*/
12169     settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
12170 
12171     /*shading mode*/
12172     settings.update(ANDROID_SHADING_MODE, &shading_mode, 1);
12173 
12174     /*hot pixel mode*/
12175     settings.update(ANDROID_HOT_PIXEL_MODE, &hot_pixel_mode, 1);
12176 
12177     /*color correction mode*/
12178     static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
12179     settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
12180 
12181     /*transform matrix mode*/
12182     settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
12183 
12184     int32_t scaler_crop_region[4];
12185     scaler_crop_region[0] = 0;
12186     scaler_crop_region[1] = 0;
12187     scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
12188     scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
12189     settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
12190 
12191     static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
12192     settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
12193 
12194     /*focus distance*/
12195     float focus_distance = 0.0;
12196     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
12197 
12198     /*target fps range: use maximum range for picture, and maximum fixed range for video*/
12199     /* Restrict template max_fps to 30 */
12200     float max_range = 0.0;
12201     float max_fixed_fps = 0.0;
12202     int32_t fps_range[2] = {0, 0};
12203     for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
12204             i++) {
12205         if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
12206                 TEMPLATE_MAX_PREVIEW_FPS) {
12207             continue;
12208         }
12209         float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
12210             gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
12211         if (type == CAMERA3_TEMPLATE_PREVIEW ||
12212                 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
12213                 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
12214             if (range > max_range) {
12215                 fps_range[0] =
12216                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
12217                 fps_range[1] =
12218                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
12219                 max_range = range;
12220             }
12221         } else {
12222             if (range < 0.01 && max_fixed_fps <
12223                     gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
12224                 fps_range[0] =
12225                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
12226                 fps_range[1] =
12227                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
12228                 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
12229             }
12230         }
12231     }
12232     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
12233 
12234     /*precapture trigger*/
12235     uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
12236     settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
12237 
12238     /*af trigger*/
12239     uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
12240     settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
12241 
12242     /* ae & af regions */
12243     int32_t active_region[] = {
12244             gCamCapability[mCameraId]->active_array_size.left,
12245             gCamCapability[mCameraId]->active_array_size.top,
12246             gCamCapability[mCameraId]->active_array_size.left +
12247                     gCamCapability[mCameraId]->active_array_size.width,
12248             gCamCapability[mCameraId]->active_array_size.top +
12249                     gCamCapability[mCameraId]->active_array_size.height,
12250             0};
12251     settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
12252             sizeof(active_region) / sizeof(active_region[0]));
12253     settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
12254             sizeof(active_region) / sizeof(active_region[0]));
12255 
12256     /* black level lock */
12257     uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
12258     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
12259 
12260     //special defaults for manual template
12261     if (type == CAMERA3_TEMPLATE_MANUAL) {
12262         static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
12263         settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
12264 
12265         static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
12266         settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
12267 
12268         static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
12269         settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
12270 
12271         static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
12272         settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
12273 
12274         static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
12275         settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
12276 
12277         static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
12278         settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
12279     }
12280 
12281 
12282     /* TNR
12283      * We'll use this location to determine which modes TNR will be set.
12284      * We will enable TNR to be on if either of the Preview/Video stream requires TNR
12285      * This is not to be confused with linking on a per stream basis that decision
12286      * is still on per-session basis and will be handled as part of config stream
12287      */
12288     uint8_t tnr_enable = 0;
12289 
12290     if (m_bTnrPreview || m_bTnrVideo) {
12291 
12292         switch (type) {
12293             case CAMERA3_TEMPLATE_VIDEO_RECORD:
12294                     tnr_enable = 1;
12295                     break;
12296 
12297             default:
12298                     tnr_enable = 0;
12299                     break;
12300         }
12301 
12302         int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
12303         settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
12304         settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
12305 
12306         LOGD("TNR:%d with process plate %d for template:%d",
12307                              tnr_enable, tnr_process_type, type);
12308     }
12309 
12310     //Update Link tags to default
12311     uint8_t sync_type = CAM_TYPE_STANDALONE;
12312     settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
12313 
12314     uint8_t is_main = 1;
12315     settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
12316 
12317     uint8_t related_camera_id = mCameraId;
12318     settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
12319 
12320     /* CDS default */
12321     char prop[PROPERTY_VALUE_MAX];
12322     memset(prop, 0, sizeof(prop));
12323     property_get("persist.camera.CDS", prop, "Auto");
12324     cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
12325     cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
12326     if (CAM_CDS_MODE_MAX == cds_mode) {
12327         cds_mode = CAM_CDS_MODE_AUTO;
12328     }
12329 
12330     /* Disabling CDS in templates which have TNR enabled*/
12331     if (tnr_enable)
12332         cds_mode = CAM_CDS_MODE_OFF;
12333 
12334     int32_t mode = cds_mode;
12335     settings.update(QCAMERA3_CDS_MODE, &mode, 1);
12336 
12337     /* Manual Convergence AEC Speed is disabled by default*/
12338     float default_aec_speed = 0;
12339     settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
12340 
12341     /* Manual Convergence AWB Speed is disabled by default*/
12342     float default_awb_speed = 0;
12343     settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
12344 
12345     // Set instant AEC to normal convergence by default
12346     uint8_t instant_aec_mode = (uint8_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
12347     settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
12348 
12349     uint8_t oisDataMode = ANDROID_STATISTICS_OIS_DATA_MODE_OFF;
12350     if (mCameraId == 0) {
12351         oisDataMode = ANDROID_STATISTICS_OIS_DATA_MODE_ON;
12352     }
12353     settings.update(ANDROID_STATISTICS_OIS_DATA_MODE, &oisDataMode, 1);
12354 
12355     if (gExposeEnableZslKey) {
12356         settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
12357         int32_t postview = 0;
12358         settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
12359         int32_t continuousZslCapture = 0;
12360         settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
12361         // Disable HDR+ for templates other than CAMERA3_TEMPLATE_STILL_CAPTURE and
12362         // CAMERA3_TEMPLATE_PREVIEW.
12363         int32_t disableHdrplus = (type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
12364                                   type == CAMERA3_TEMPLATE_PREVIEW) ? 0 : 1;
12365         settings.update(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS, &disableHdrplus, 1);
12366 
12367         // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
12368         // hybrid ae is enabled for 3rd party app HDR+.
12369         if (type == CAMERA3_TEMPLATE_PREVIEW ||
12370                 type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
12371             hybrid_ae = 1;
12372         }
12373     }
12374     /* hybrid ae */
12375     settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
12376 
12377     int32_t fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
12378     settings.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
12379 
12380     mDefaultMetadata[type] = settings.release();
12381 
12382     return mDefaultMetadata[type];
12383 }
12384 
12385 /*===========================================================================
12386  * FUNCTION   : getExpectedFrameDuration
12387  *
12388  * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
12389  *              duration
12390  *
12391  * PARAMETERS :
12392  *   @request   : request settings
12393  *   @frameDuration : The maximum frame duration in nanoseconds
12394  *
12395  * RETURN     : None
12396  *==========================================================================*/
getExpectedFrameDuration(const camera_metadata_t * request,nsecs_t * frameDuration)12397 void QCamera3HardwareInterface::getExpectedFrameDuration(
12398         const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
12399     if (nullptr == frameDuration) {
12400         return;
12401     }
12402 
12403     camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
12404     find_camera_metadata_ro_entry(request,
12405             ANDROID_SENSOR_EXPOSURE_TIME,
12406             &e);
12407     if (e.count > 0) {
12408         *frameDuration = e.data.i64[0];
12409     }
12410     find_camera_metadata_ro_entry(request,
12411             ANDROID_SENSOR_FRAME_DURATION,
12412             &e);
12413     if (e.count > 0) {
12414         *frameDuration = std::max(e.data.i64[0], *frameDuration);
12415     }
12416 }
12417 
12418 /*===========================================================================
12419  * FUNCTION   : calculateMaxExpectedDuration
12420  *
12421  * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
12422  *              current camera settings.
12423  *
12424  * PARAMETERS :
12425  *   @request   : request settings
12426  *
12427  * RETURN     : Expected frame duration in nanoseconds.
12428  *==========================================================================*/
calculateMaxExpectedDuration(const camera_metadata_t * request)12429 nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
12430         const camera_metadata_t *request) {
12431     nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
12432     camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
12433     find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
12434     if (e.count == 0) {
12435         return maxExpectedDuration;
12436     }
12437 
12438     if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
12439         getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
12440     }
12441 
12442     if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
12443         return maxExpectedDuration;
12444     }
12445 
12446     find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
12447     if (e.count == 0) {
12448         return maxExpectedDuration;
12449     }
12450 
12451     switch (e.data.u8[0]) {
12452         case ANDROID_CONTROL_AE_MODE_OFF:
12453             getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
12454             break;
12455         default:
12456             find_camera_metadata_ro_entry(request,
12457                     ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
12458                     &e);
12459             if (e.count > 1) {
12460                 maxExpectedDuration = 1e9 / e.data.u8[0];
12461             }
12462             break;
12463     }
12464 
12465     return maxExpectedDuration;
12466 }
12467 
12468 /*===========================================================================
12469  * FUNCTION   : setFrameParameters
12470  *
12471  * DESCRIPTION: set parameters per frame as requested in the metadata from
12472  *              framework
12473  *
12474  * PARAMETERS :
12475  *   @request   : request that needs to be serviced
12476  *   @streamsArray : Stream ID of all the requested streams
12477  *   @blob_request: Whether this request is a blob request or not
12478  *
12479  * RETURN     : success: NO_ERROR
12480  *              failure:
12481  *==========================================================================*/
setFrameParameters(camera3_capture_request_t * request,cam_stream_ID_t streamsArray,int blob_request,uint32_t snapshotStreamId)12482 int QCamera3HardwareInterface::setFrameParameters(
12483                     camera3_capture_request_t *request,
12484                     cam_stream_ID_t streamsArray,
12485                     int blob_request,
12486                     uint32_t snapshotStreamId)
12487 {
12488     /*translate from camera_metadata_t type to parm_type_t*/
12489     int rc = 0;
12490     int32_t hal_version = CAM_HAL_V3;
12491 
12492     clear_metadata_buffer(mParameters);
12493     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
12494         LOGE("Failed to set hal version in the parameters");
12495         return BAD_VALUE;
12496     }
12497 
12498     /*we need to update the frame number in the parameters*/
12499     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
12500             request->frame_number)) {
12501         LOGE("Failed to set the frame number in the parameters");
12502         return BAD_VALUE;
12503     }
12504 
12505     /* Update stream id of all the requested buffers */
12506     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
12507         LOGE("Failed to set stream type mask in the parameters");
12508         return BAD_VALUE;
12509     }
12510 
12511     if (mUpdateDebugLevel) {
12512         uint32_t dummyDebugLevel = 0;
12513         /* The value of dummyDebugLevel is irrelavent. On
12514          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
12515         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
12516                 dummyDebugLevel)) {
12517             LOGE("Failed to set UPDATE_DEBUG_LEVEL");
12518             return BAD_VALUE;
12519         }
12520         mUpdateDebugLevel = false;
12521     }
12522 
12523     if(request->settings != NULL){
12524         mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
12525         rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
12526         if (blob_request)
12527             memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
12528     }
12529 
12530     return rc;
12531 }
12532 
12533 /*===========================================================================
12534  * FUNCTION   : setReprocParameters
12535  *
12536  * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
12537  *              return it.
12538  *
12539  * PARAMETERS :
12540  *   @request   : request that needs to be serviced
12541  *
12542  * RETURN     : success: NO_ERROR
12543  *              failure:
12544  *==========================================================================*/
setReprocParameters(camera3_capture_request_t * request,metadata_buffer_t * reprocParam,uint32_t snapshotStreamId)12545 int32_t QCamera3HardwareInterface::setReprocParameters(
12546         camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
12547         uint32_t snapshotStreamId)
12548 {
12549     /*translate from camera_metadata_t type to parm_type_t*/
12550     int rc = 0;
12551 
12552     if (NULL == request->settings){
12553         LOGE("Reprocess settings cannot be NULL");
12554         return BAD_VALUE;
12555     }
12556 
12557     if (NULL == reprocParam) {
12558         LOGE("Invalid reprocessing metadata buffer");
12559         return BAD_VALUE;
12560     }
12561     clear_metadata_buffer(reprocParam);
12562 
12563     /*we need to update the frame number in the parameters*/
12564     if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
12565             request->frame_number)) {
12566         LOGE("Failed to set the frame number in the parameters");
12567         return BAD_VALUE;
12568     }
12569 
12570     rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
12571     if (rc < 0) {
12572         LOGE("Failed to translate reproc request");
12573         return rc;
12574     }
12575 
12576     CameraMetadata frame_settings;
12577     frame_settings = request->settings;
12578     if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
12579             frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
12580         int32_t *crop_count =
12581                 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
12582         int32_t *crop_data =
12583                 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
12584         int32_t *roi_map =
12585                 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
12586         if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
12587             cam_crop_data_t crop_meta;
12588             memset(&crop_meta, 0, sizeof(cam_crop_data_t));
12589             crop_meta.num_of_streams = 1;
12590             crop_meta.crop_info[0].crop.left   = crop_data[0];
12591             crop_meta.crop_info[0].crop.top    = crop_data[1];
12592             crop_meta.crop_info[0].crop.width  = crop_data[2];
12593             crop_meta.crop_info[0].crop.height = crop_data[3];
12594 
12595             crop_meta.crop_info[0].roi_map.left =
12596                     roi_map[0];
12597             crop_meta.crop_info[0].roi_map.top =
12598                     roi_map[1];
12599             crop_meta.crop_info[0].roi_map.width =
12600                     roi_map[2];
12601             crop_meta.crop_info[0].roi_map.height =
12602                     roi_map[3];
12603 
12604             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
12605                 rc = BAD_VALUE;
12606             }
12607             LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
12608                     request->input_buffer->stream,
12609                     crop_meta.crop_info[0].crop.left,
12610                     crop_meta.crop_info[0].crop.top,
12611                     crop_meta.crop_info[0].crop.width,
12612                     crop_meta.crop_info[0].crop.height);
12613             LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
12614                     request->input_buffer->stream,
12615                     crop_meta.crop_info[0].roi_map.left,
12616                     crop_meta.crop_info[0].roi_map.top,
12617                     crop_meta.crop_info[0].roi_map.width,
12618                     crop_meta.crop_info[0].roi_map.height);
12619             } else {
12620                 LOGE("Invalid reprocess crop count %d!", *crop_count);
12621             }
12622     } else {
12623         LOGE("No crop data from matching output stream");
12624     }
12625 
12626     /* These settings are not needed for regular requests so handle them specially for
12627        reprocess requests; information needed for EXIF tags */
12628     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12629         int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12630                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12631         if (NAME_NOT_FOUND != val) {
12632             uint32_t flashMode = (uint32_t)val;
12633             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
12634                 rc = BAD_VALUE;
12635             }
12636         } else {
12637             LOGE("Could not map fwk flash mode %d to correct hal flash mode",
12638                     frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12639         }
12640     } else {
12641         LOGH("No flash mode in reprocess settings");
12642     }
12643 
12644     if (frame_settings.exists(ANDROID_FLASH_STATE)) {
12645         int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
12646         if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
12647             rc = BAD_VALUE;
12648         }
12649     } else {
12650         LOGH("No flash state in reprocess settings");
12651     }
12652 
12653     if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
12654         uint8_t *reprocessFlags =
12655             frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
12656         if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
12657                 *reprocessFlags)) {
12658                 rc = BAD_VALUE;
12659         }
12660     }
12661 
12662     // Add exif debug data to internal metadata
12663     if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
12664         mm_jpeg_debug_exif_params_t *debug_params =
12665                 (mm_jpeg_debug_exif_params_t *)frame_settings.find
12666                 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
12667         // AE
12668         if (debug_params->ae_debug_params_valid == TRUE) {
12669             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
12670                     debug_params->ae_debug_params);
12671         }
12672         // AWB
12673         if (debug_params->awb_debug_params_valid == TRUE) {
12674             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
12675                 debug_params->awb_debug_params);
12676         }
12677         // AF
12678        if (debug_params->af_debug_params_valid == TRUE) {
12679             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
12680                    debug_params->af_debug_params);
12681         }
12682         // ASD
12683         if (debug_params->asd_debug_params_valid == TRUE) {
12684             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
12685                     debug_params->asd_debug_params);
12686         }
12687         // Stats
12688         if (debug_params->stats_debug_params_valid == TRUE) {
12689             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
12690                     debug_params->stats_debug_params);
12691        }
12692         // BE Stats
12693         if (debug_params->bestats_debug_params_valid == TRUE) {
12694             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
12695                     debug_params->bestats_debug_params);
12696         }
12697         // BHIST
12698         if (debug_params->bhist_debug_params_valid == TRUE) {
12699             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
12700                     debug_params->bhist_debug_params);
12701        }
12702         // 3A Tuning
12703         if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
12704             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
12705                     debug_params->q3a_tuning_debug_params);
12706         }
12707     }
12708 
12709     // Add metadata which reprocess needs
12710     if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
12711         cam_reprocess_info_t *repro_info =
12712                 (cam_reprocess_info_t *)frame_settings.find
12713                 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
12714         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
12715                 repro_info->sensor_crop_info);
12716         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
12717                 repro_info->camif_crop_info);
12718         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
12719                 repro_info->isp_crop_info);
12720         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
12721                 repro_info->cpp_crop_info);
12722         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
12723                 repro_info->af_focal_length_ratio);
12724         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
12725                 repro_info->pipeline_flip);
12726         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
12727                 repro_info->af_roi);
12728         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
12729                 repro_info->dyn_mask);
12730         /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
12731            CAM_INTF_PARM_ROTATION metadata then has been added in
12732            translateToHalMetadata. HAL need to keep this new rotation
12733            metadata. Otherwise, the old rotation info saved in the vendor tag
12734            would be used */
12735         IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
12736                 CAM_INTF_PARM_ROTATION, reprocParam) {
12737             LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
12738         } else {
12739             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
12740                     repro_info->rotation_info);
12741         }
12742     }
12743 
12744     /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
12745        to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
12746        roi.width and roi.height would be the final JPEG size.
12747        For now, HAL only checks this for reprocess request */
12748     if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
12749             frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
12750         uint8_t *enable =
12751             frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
12752         if (*enable == TRUE) {
12753             int32_t *crop_data =
12754                     frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
12755             cam_stream_crop_info_t crop_meta;
12756             memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
12757             crop_meta.stream_id = 0;
12758             crop_meta.crop.left   = crop_data[0];
12759             crop_meta.crop.top    = crop_data[1];
12760             crop_meta.crop.width  = crop_data[2];
12761             crop_meta.crop.height = crop_data[3];
12762             // The JPEG crop roi should match cpp output size
12763             IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
12764                     CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
12765                 crop_meta.roi_map.left = 0;
12766                 crop_meta.roi_map.top = 0;
12767                 crop_meta.roi_map.width = cpp_crop->crop.width;
12768                 crop_meta.roi_map.height = cpp_crop->crop.height;
12769             }
12770             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
12771                     crop_meta);
12772             LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
12773                     crop_meta.crop.left, crop_meta.crop.top,
12774                     crop_meta.crop.width, crop_meta.crop.height, mCameraId);
12775             LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
12776                     crop_meta.roi_map.left, crop_meta.roi_map.top,
12777                     crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
12778 
12779             // Add JPEG scale information
12780             cam_dimension_t scale_dim;
12781             memset(&scale_dim, 0, sizeof(cam_dimension_t));
12782             if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
12783                 int32_t *roi =
12784                     frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
12785                 scale_dim.width = roi[2];
12786                 scale_dim.height = roi[3];
12787                 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
12788                     scale_dim);
12789                 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
12790                     scale_dim.width, scale_dim.height, mCameraId);
12791             }
12792         }
12793     }
12794 
12795     return rc;
12796 }
12797 
12798 /*===========================================================================
12799  * FUNCTION   : saveRequestSettings
12800  *
12801  * DESCRIPTION: Add any settings that might have changed to the request settings
12802  *              and save the settings to be applied on the frame
12803  *
12804  * PARAMETERS :
12805  *   @jpegMetadata : the extracted and/or modified jpeg metadata
12806  *   @request      : request with initial settings
12807  *
12808  * RETURN     :
12809  * camera_metadata_t* : pointer to the saved request settings
12810  *==========================================================================*/
saveRequestSettings(const CameraMetadata & jpegMetadata,camera3_capture_request_t * request)12811 camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
12812         const CameraMetadata &jpegMetadata,
12813         camera3_capture_request_t *request)
12814 {
12815     camera_metadata_t *resultMetadata;
12816     CameraMetadata camMetadata;
12817     camMetadata = request->settings;
12818 
12819     if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12820         int32_t thumbnail_size[2];
12821         thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12822         thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12823         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
12824                 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
12825     }
12826 
12827     if (request->input_buffer != NULL) {
12828         uint8_t reprocessFlags = 1;
12829         camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
12830                 (uint8_t*)&reprocessFlags,
12831                 sizeof(reprocessFlags));
12832     }
12833 
12834     resultMetadata = camMetadata.release();
12835     return resultMetadata;
12836 }
12837 
12838 /*===========================================================================
12839  * FUNCTION   : setHalFpsRange
12840  *
12841  * DESCRIPTION: set FPS range parameter
12842  *
12843  *
12844  * PARAMETERS :
12845  *   @settings    : Metadata from framework
12846  *   @hal_metadata: Metadata buffer
12847  *
12848  *
12849  * RETURN     : success: NO_ERROR
12850  *              failure:
12851  *==========================================================================*/
setHalFpsRange(const CameraMetadata & settings,metadata_buffer_t * hal_metadata)12852 int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
12853         metadata_buffer_t *hal_metadata)
12854 {
12855     int32_t rc = NO_ERROR;
12856     cam_fps_range_t fps_range;
12857     fps_range.min_fps = (float)
12858             settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
12859     fps_range.max_fps = (float)
12860             settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
12861     fps_range.video_min_fps = fps_range.min_fps;
12862     fps_range.video_max_fps = fps_range.max_fps;
12863 
12864     LOGD("aeTargetFpsRange fps: [%f %f]",
12865             fps_range.min_fps, fps_range.max_fps);
12866     /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
12867      * follows:
12868      * ---------------------------------------------------------------|
12869      *      Video stream is absent in configure_streams               |
12870      *    (Camcorder preview before the first video record            |
12871      * ---------------------------------------------------------------|
12872      * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12873      *                   |             |             | vid_min/max_fps|
12874      * ---------------------------------------------------------------|
12875      *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
12876      *                   |-------------|-------------|----------------|
12877      *                   |  [240, 240] |     240     |  [240, 240]    |
12878      * ---------------------------------------------------------------|
12879      *     Video stream is present in configure_streams               |
12880      * ---------------------------------------------------------------|
12881      * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12882      *                   |             |             | vid_min/max_fps|
12883      * ---------------------------------------------------------------|
12884      *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
12885      * (camcorder prev   |-------------|-------------|----------------|
12886      *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
12887      *  is stopped)      |             |             |                |
12888      * ---------------------------------------------------------------|
12889      *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
12890      *                   |-------------|-------------|----------------|
12891      *                   |  [240, 240] |     240     |  [240, 240]    |
12892      * ---------------------------------------------------------------|
12893      * When Video stream is absent in configure_streams,
12894      * preview fps = sensor_fps / batchsize
12895      * Eg: for 240fps at batchSize 4, preview = 60fps
12896      *     for 120fps at batchSize 4, preview = 30fps
12897      *
12898      * When video stream is present in configure_streams, preview fps is as per
12899      * the ratio of preview buffers to video buffers requested in process
12900      * capture request
12901      */
12902     mBatchSize = 0;
12903     if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12904         fps_range.min_fps = fps_range.video_max_fps;
12905         fps_range.video_min_fps = fps_range.video_max_fps;
12906         int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12907                 fps_range.max_fps);
12908         if (NAME_NOT_FOUND != val) {
12909             cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12910             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12911                 return BAD_VALUE;
12912             }
12913 
12914             if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12915                 /* If batchmode is currently in progress and the fps changes,
12916                  * set the flag to restart the sensor */
12917                 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12918                         (mHFRVideoFps != fps_range.max_fps)) {
12919                     mNeedSensorRestart = true;
12920                 }
12921                 mHFRVideoFps = fps_range.max_fps;
12922                 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12923                 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12924                     mBatchSize = MAX_HFR_BATCH_SIZE;
12925                 }
12926              }
12927             LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12928 
12929          }
12930     } else {
12931         /* HFR mode is session param in backend/ISP. This should be reset when
12932          * in non-HFR mode  */
12933         cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12934         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12935             return BAD_VALUE;
12936         }
12937     }
12938     if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12939         return BAD_VALUE;
12940     }
12941     LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12942             fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12943     return rc;
12944 }
12945 
12946 /*===========================================================================
12947  * FUNCTION   : translateToHalMetadata
12948  *
12949  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12950  *
12951  *
12952  * PARAMETERS :
12953  *   @request  : request sent from framework
12954  *   @hal_metadata: Hal specific metadata buffer
12955  *   @snapshotStreamId: Snapshot stream ID.
12956  *
12957  * RETURN     : success: NO_ERROR
12958  *              failure:
12959  *==========================================================================*/
translateToHalMetadata(const camera3_capture_request_t * request,metadata_buffer_t * hal_metadata,uint32_t snapshotStreamId)12960 int QCamera3HardwareInterface::translateToHalMetadata
12961                                   (const camera3_capture_request_t *request,
12962                                    metadata_buffer_t *hal_metadata,
12963                                    uint32_t snapshotStreamId) {
12964     if (request == nullptr || hal_metadata == nullptr) {
12965         return BAD_VALUE;
12966     }
12967 
12968     int64_t minFrameDuration = getMinFrameDuration(request);
12969 
12970     return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12971             minFrameDuration);
12972 }
12973 
translateFwkMetadataToHalMetadata(const camera_metadata_t * frameworkMetadata,metadata_buffer_t * hal_metadata,uint32_t snapshotStreamId,int64_t minFrameDuration)12974 int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12975         const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12976         uint32_t snapshotStreamId, int64_t minFrameDuration) {
12977 
12978     int rc = 0;
12979     CameraMetadata frame_settings;
12980     frame_settings = frameworkMetadata;
12981 
12982     /* Do not change the order of the following list unless you know what you are
12983      * doing.
12984      * The order is laid out in such a way that parameters in the front of the table
12985      * may be used to override the parameters later in the table. Examples are:
12986      * 1. META_MODE should precede AEC/AWB/AF MODE
12987      * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12988      * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12989      * 4. Any mode should precede it's corresponding settings
12990      */
12991     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12992         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12993         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12994             rc = BAD_VALUE;
12995         }
12996         rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12997         if (rc != NO_ERROR) {
12998             LOGE("extractSceneMode failed");
12999         }
13000     }
13001 
13002     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
13003         uint8_t fwk_aeMode =
13004             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
13005         uint8_t aeMode;
13006         int32_t redeye;
13007 
13008         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
13009             aeMode = CAM_AE_MODE_OFF;
13010         } else if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH) {
13011             aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
13012         } else {
13013             aeMode = CAM_AE_MODE_ON;
13014         }
13015         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
13016             redeye = 1;
13017         } else {
13018             redeye = 0;
13019         }
13020 
13021         int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
13022                 fwk_aeMode);
13023         if (NAME_NOT_FOUND != val) {
13024             int32_t flashMode = (int32_t)val;
13025             ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
13026         }
13027 
13028         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
13029         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
13030             rc = BAD_VALUE;
13031         }
13032     }
13033 
13034     if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
13035         uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
13036         int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
13037                 fwk_whiteLevel);
13038         if (NAME_NOT_FOUND != val) {
13039             uint8_t whiteLevel = (uint8_t)val;
13040             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
13041                 rc = BAD_VALUE;
13042             }
13043         }
13044     }
13045 
13046     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
13047         uint8_t fwk_cacMode =
13048                 frame_settings.find(
13049                         ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
13050         int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
13051                 fwk_cacMode);
13052         if (NAME_NOT_FOUND != val) {
13053             cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
13054             bool entryAvailable = FALSE;
13055             // Check whether Frameworks set CAC mode is supported in device or not
13056             for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
13057                 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
13058                     entryAvailable = TRUE;
13059                     break;
13060                 }
13061             }
13062             LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
13063             // If entry not found then set the device supported mode instead of frameworks mode i.e,
13064             // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
13065             // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
13066             if (entryAvailable == FALSE) {
13067                 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
13068                     cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
13069                 } else {
13070                     if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
13071                         // High is not supported and so set the FAST as spec say's underlying
13072                         // device implementation can be the same for both modes.
13073                         cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
13074                     } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
13075                         // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
13076                         // in order to avoid the fps drop due to high quality
13077                         cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
13078                     } else {
13079                         cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
13080                     }
13081                 }
13082             }
13083             LOGD("Final cacMode is %d", cacMode);
13084             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
13085                 rc = BAD_VALUE;
13086             }
13087         } else {
13088             LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
13089         }
13090     }
13091 
13092     uint8_t fwk_focusMode = 0;
13093     if (m_bForceInfinityAf == 0) {
13094         if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
13095             fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
13096             int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
13097                     fwk_focusMode);
13098             if (NAME_NOT_FOUND != val) {
13099                 uint8_t focusMode = (uint8_t)val;
13100                 LOGD("set focus mode %d", focusMode);
13101                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13102                          CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
13103                     rc = BAD_VALUE;
13104                 }
13105             }
13106         } else {
13107             LOGE("Fatal: Missing ANDROID_CONTROL_AF_MODE");
13108         }
13109     } else {
13110         uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
13111         LOGE("Focus forced to infinity %d", focusMode);
13112         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
13113             rc = BAD_VALUE;
13114         }
13115     }
13116 
13117     if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
13118             fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
13119         float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
13120         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
13121                 focalDistance)) {
13122             rc = BAD_VALUE;
13123         }
13124     }
13125 
13126     if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
13127         uint8_t fwk_antibandingMode =
13128                 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
13129         int val = lookupHalName(ANTIBANDING_MODES_MAP,
13130                 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
13131         if (NAME_NOT_FOUND != val) {
13132             uint32_t hal_antibandingMode = (uint32_t)val;
13133             if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
13134                 if (m60HzZone) {
13135                     hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
13136                 } else {
13137                     hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
13138                 }
13139             }
13140             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
13141                     hal_antibandingMode)) {
13142                 rc = BAD_VALUE;
13143             }
13144         }
13145     }
13146 
13147     if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
13148         int32_t expCompensation = frame_settings.find(
13149                 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
13150         if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
13151             expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
13152         if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
13153             expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
13154         LOGD("Setting compensation:%d", expCompensation);
13155         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
13156                 expCompensation)) {
13157             rc = BAD_VALUE;
13158         }
13159     }
13160 
13161     if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
13162         uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
13163         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
13164             rc = BAD_VALUE;
13165         }
13166     }
13167     if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
13168         rc = setHalFpsRange(frame_settings, hal_metadata);
13169         if (rc != NO_ERROR) {
13170             LOGE("setHalFpsRange failed");
13171         }
13172     }
13173 
13174     if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
13175         uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
13176         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
13177             rc = BAD_VALUE;
13178         }
13179     }
13180 
13181     if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
13182         uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
13183         int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
13184                 fwk_effectMode);
13185         if (NAME_NOT_FOUND != val) {
13186             uint8_t effectMode = (uint8_t)val;
13187             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
13188                 rc = BAD_VALUE;
13189             }
13190         }
13191     }
13192 
13193     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
13194         uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
13195         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
13196                 colorCorrectMode)) {
13197             rc = BAD_VALUE;
13198         }
13199     }
13200 
13201     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
13202         cam_color_correct_gains_t colorCorrectGains;
13203         for (size_t i = 0; i < CC_GAIN_MAX; i++) {
13204             colorCorrectGains.gains[i] =
13205                     frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
13206         }
13207         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
13208                 colorCorrectGains)) {
13209             rc = BAD_VALUE;
13210         }
13211     }
13212 
13213     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
13214         cam_color_correct_matrix_t colorCorrectTransform;
13215         cam_rational_type_t transform_elem;
13216         size_t num = 0;
13217         for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
13218            for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
13219               transform_elem.numerator =
13220                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
13221               transform_elem.denominator =
13222                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
13223               colorCorrectTransform.transform_matrix[i][j] = transform_elem;
13224               num++;
13225            }
13226         }
13227         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
13228                 colorCorrectTransform)) {
13229             rc = BAD_VALUE;
13230         }
13231     }
13232 
13233     cam_trigger_t aecTrigger;
13234     aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
13235     aecTrigger.trigger_id = -1;
13236     if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
13237         frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
13238         aecTrigger.trigger =
13239             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
13240         aecTrigger.trigger_id =
13241             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
13242         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
13243                 aecTrigger)) {
13244             rc = BAD_VALUE;
13245         }
13246         LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
13247                 aecTrigger.trigger, aecTrigger.trigger_id);
13248     }
13249 
13250     /*af_trigger must come with a trigger id*/
13251     if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
13252         frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
13253         cam_trigger_t af_trigger;
13254         af_trigger.trigger =
13255             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
13256         af_trigger.trigger_id =
13257             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
13258         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
13259             rc = BAD_VALUE;
13260         }
13261         LOGD("AfTrigger: %d AfTriggerID: %d",
13262                 af_trigger.trigger, af_trigger.trigger_id);
13263     }
13264 
13265     if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
13266         int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
13267         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
13268             rc = BAD_VALUE;
13269         }
13270     }
13271     if (frame_settings.exists(ANDROID_EDGE_MODE)) {
13272         cam_edge_application_t edge_application;
13273         edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
13274 
13275         if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
13276             edge_application.sharpness = 0;
13277         } else {
13278             edge_application.sharpness =
13279                     gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
13280             if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
13281                 int32_t sharpness =
13282                         frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
13283                 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
13284                     sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
13285                     LOGD("Setting edge mode sharpness %d", sharpness);
13286                     edge_application.sharpness = sharpness;
13287                 }
13288             }
13289         }
13290         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
13291             rc = BAD_VALUE;
13292         }
13293     }
13294 
13295     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
13296         uint32_t flashMode = (uint32_t)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
13297         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_MODE, flashMode)) {
13298             rc = BAD_VALUE;
13299         }
13300 
13301         int32_t respectFlashMode = 1;
13302         if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
13303             uint8_t fwk_aeMode =
13304                 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
13305             if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
13306                     fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
13307                     fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
13308                 respectFlashMode = 0;
13309                 LOGH("AE Mode controls flash, ignore android.flash.mode");
13310             }
13311         }
13312         if (respectFlashMode) {
13313             int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
13314                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
13315             LOGH("flash mode after mapping %d", val);
13316             // To check: CAM_INTF_META_FLASH_MODE usage
13317             if (NAME_NOT_FOUND != val) {
13318                 uint8_t ledMode = (uint8_t)val;
13319                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, ledMode)) {
13320                     rc = BAD_VALUE;
13321                 }
13322             }
13323         }
13324     }
13325 
13326     if (frame_settings.exists(ANDROID_FLASH_STATE)) {
13327         int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.i32[0];
13328         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_STATE, flashState)) {
13329             rc = BAD_VALUE;
13330         }
13331     }
13332 
13333     if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
13334         uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
13335         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
13336             rc = BAD_VALUE;
13337         }
13338     }
13339 
13340     if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
13341         int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
13342         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
13343                 flashFiringTime)) {
13344             rc = BAD_VALUE;
13345         }
13346     }
13347 
13348     if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
13349         uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
13350         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
13351                 hotPixelMode)) {
13352             rc = BAD_VALUE;
13353         }
13354     }
13355 
13356     if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
13357         float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
13358         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
13359                 lensAperture)) {
13360             rc = BAD_VALUE;
13361         }
13362     }
13363 
13364     if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
13365         float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
13366         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
13367                 filterDensity)) {
13368             rc = BAD_VALUE;
13369         }
13370     }
13371 
13372     if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
13373         float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
13374         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
13375                 focalLength)) {
13376             rc = BAD_VALUE;
13377         }
13378     }
13379 
13380     if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
13381         uint8_t optStabMode =
13382                 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
13383         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
13384                 optStabMode)) {
13385             rc = BAD_VALUE;
13386         }
13387     }
13388 
13389     if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
13390         uint8_t videoStabMode =
13391                 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
13392         LOGD("videoStabMode from APP = %d", videoStabMode);
13393         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
13394                 videoStabMode)) {
13395             rc = BAD_VALUE;
13396         }
13397     }
13398 
13399 
13400     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
13401         uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
13402         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
13403                 noiseRedMode)) {
13404             rc = BAD_VALUE;
13405         }
13406     }
13407 
13408     if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
13409         float reprocessEffectiveExposureFactor =
13410             frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
13411         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
13412                 reprocessEffectiveExposureFactor)) {
13413             rc = BAD_VALUE;
13414         }
13415     }
13416 
13417     cam_crop_region_t scalerCropRegion;
13418     bool scalerCropSet = false;
13419     if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
13420         scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
13421         scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
13422         scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
13423         scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
13424 
13425         // Map coordinate system from active array to sensor output.
13426         mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
13427                 scalerCropRegion.width, scalerCropRegion.height);
13428 
13429         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
13430                 scalerCropRegion)) {
13431             rc = BAD_VALUE;
13432         }
13433         scalerCropSet = true;
13434     }
13435 
13436     if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
13437         int64_t sensorExpTime =
13438                 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
13439         LOGD("setting sensorExpTime %lld", sensorExpTime);
13440         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
13441                 sensorExpTime)) {
13442             rc = BAD_VALUE;
13443         }
13444     }
13445 
13446     if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
13447         int64_t sensorFrameDuration =
13448                 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
13449         sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
13450         if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
13451             sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
13452         LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
13453         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
13454                 sensorFrameDuration)) {
13455             rc = BAD_VALUE;
13456         }
13457     }
13458 
13459     if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
13460         int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
13461         if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
13462                 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
13463         if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
13464                 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
13465         LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
13466         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
13467                 sensorSensitivity)) {
13468             rc = BAD_VALUE;
13469         }
13470     }
13471 
13472 #ifndef USE_HAL_3_3
13473     if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
13474         int32_t ispSensitivity =
13475             frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
13476         if (ispSensitivity <
13477             gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
13478                 ispSensitivity =
13479                     gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
13480                 LOGD("clamp ispSensitivity to %d", ispSensitivity);
13481         }
13482         if (ispSensitivity >
13483             gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
13484                 ispSensitivity =
13485                     gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
13486                 LOGD("clamp ispSensitivity to %d", ispSensitivity);
13487         }
13488         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
13489                 ispSensitivity)) {
13490             rc = BAD_VALUE;
13491         }
13492     }
13493 #endif
13494 
13495     if (frame_settings.exists(ANDROID_SHADING_MODE)) {
13496         uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
13497         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
13498             rc = BAD_VALUE;
13499         }
13500     }
13501 
13502     if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
13503         uint8_t fwk_facedetectMode =
13504                 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
13505 
13506         int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
13507                 fwk_facedetectMode);
13508 
13509         if (NAME_NOT_FOUND != val) {
13510             uint8_t facedetectMode = (uint8_t)val;
13511             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
13512                     facedetectMode)) {
13513                 rc = BAD_VALUE;
13514             }
13515         }
13516     }
13517 
13518     if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
13519         uint8_t histogramMode =
13520                 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
13521         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13522                 histogramMode)) {
13523             rc = BAD_VALUE;
13524         }
13525     }
13526 
13527     if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
13528         uint8_t sharpnessMapMode =
13529                 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
13530         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
13531                 sharpnessMapMode)) {
13532             rc = BAD_VALUE;
13533         }
13534     }
13535 
13536     if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
13537         uint8_t tonemapMode =
13538                 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
13539         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
13540             rc = BAD_VALUE;
13541         }
13542     }
13543     /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
13544     /*All tonemap channels will have the same number of points*/
13545     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
13546         frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
13547         frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
13548         cam_rgb_tonemap_curves tonemapCurves;
13549         tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
13550         if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
13551             LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
13552                      tonemapCurves.tonemap_points_cnt,
13553                     CAM_MAX_TONEMAP_CURVE_SIZE);
13554             tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
13555         }
13556 
13557         /* ch0 = G*/
13558         size_t point = 0;
13559         cam_tonemap_curve_t tonemapCurveGreen;
13560         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
13561             for (size_t j = 0; j < 2; j++) {
13562                tonemapCurveGreen.tonemap_points[i][j] =
13563                   frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
13564                point++;
13565             }
13566         }
13567         tonemapCurves.curves[0] = tonemapCurveGreen;
13568 
13569         /* ch 1 = B */
13570         point = 0;
13571         cam_tonemap_curve_t tonemapCurveBlue;
13572         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
13573             for (size_t j = 0; j < 2; j++) {
13574                tonemapCurveBlue.tonemap_points[i][j] =
13575                   frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
13576                point++;
13577             }
13578         }
13579         tonemapCurves.curves[1] = tonemapCurveBlue;
13580 
13581         /* ch 2 = R */
13582         point = 0;
13583         cam_tonemap_curve_t tonemapCurveRed;
13584         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
13585             for (size_t j = 0; j < 2; j++) {
13586                tonemapCurveRed.tonemap_points[i][j] =
13587                   frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
13588                point++;
13589             }
13590         }
13591         tonemapCurves.curves[2] = tonemapCurveRed;
13592 
13593         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
13594                 tonemapCurves)) {
13595             rc = BAD_VALUE;
13596         }
13597     }
13598 
13599     if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
13600         uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
13601         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
13602                 captureIntent)) {
13603             rc = BAD_VALUE;
13604         }
13605     }
13606 
13607     if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
13608         uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
13609         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
13610                 blackLevelLock)) {
13611             rc = BAD_VALUE;
13612         }
13613     }
13614 
13615     if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
13616         uint8_t lensShadingMapMode =
13617                 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
13618         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
13619                 lensShadingMapMode)) {
13620             rc = BAD_VALUE;
13621         }
13622     }
13623 
13624     if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
13625         cam_area_t roi;
13626         bool reset = true;
13627         convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
13628 
13629         // Map coordinate system from active array to sensor output.
13630         mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
13631                 roi.rect.height);
13632 
13633         if (scalerCropSet) {
13634             reset = resetIfNeededROI(&roi, &scalerCropRegion);
13635         }
13636         if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
13637             rc = BAD_VALUE;
13638         }
13639     }
13640 
13641     if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
13642         cam_area_t roi;
13643         bool reset = true;
13644         convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
13645 
13646         // Map coordinate system from active array to sensor output.
13647         mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
13648                 roi.rect.height);
13649 
13650         if (scalerCropSet) {
13651             reset = resetIfNeededROI(&roi, &scalerCropRegion);
13652         }
13653         if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
13654             rc = BAD_VALUE;
13655         }
13656     }
13657 
13658     // CDS for non-HFR non-video mode
13659     if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
13660             !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
13661         int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
13662         if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
13663             LOGE("Invalid CDS mode %d!", *fwk_cds);
13664         } else {
13665             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13666                     CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
13667                 rc = BAD_VALUE;
13668             }
13669         }
13670     }
13671 
13672     // Video HDR
13673     cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
13674     if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
13675         vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
13676     }
13677     if (m_bVideoHdrEnabled)
13678         vhdr = CAM_VIDEO_HDR_MODE_ON;
13679 
13680     int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
13681 
13682     if(vhdr != curr_hdr_state)
13683         LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
13684 
13685     rc = setVideoHdrMode(mParameters, vhdr);
13686     if (rc != NO_ERROR) {
13687         LOGE("setVideoHDR is failed");
13688     }
13689 
13690     //IR
13691     if(frame_settings.exists(QCAMERA3_IR_MODE)) {
13692         cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
13693                 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
13694         uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
13695         uint8_t isIRon = 0;
13696 
13697         (fwk_ir >0) ? (isIRon = 1) : (isIRon = 0) ;
13698         if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
13699             LOGE("Invalid IR mode %d!", fwk_ir);
13700         } else {
13701             if(isIRon != curr_ir_state )
13702                LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
13703 
13704             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13705                     CAM_INTF_META_IR_MODE, fwk_ir)) {
13706                 rc = BAD_VALUE;
13707             }
13708         }
13709     }
13710 
13711     //Binning Correction Mode
13712     if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
13713         cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
13714                 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
13715         if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
13716                 || (0 > fwk_binning_correction)) {
13717             LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
13718         } else {
13719             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13720                     CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
13721                 rc = BAD_VALUE;
13722             }
13723         }
13724     }
13725 
13726     if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
13727         float aec_speed;
13728         aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
13729         LOGD("AEC Speed :%f", aec_speed);
13730         if ( aec_speed < 0 ) {
13731             LOGE("Invalid AEC mode %f!", aec_speed);
13732         } else {
13733             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
13734                     aec_speed)) {
13735                 rc = BAD_VALUE;
13736             }
13737         }
13738     }
13739 
13740     if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
13741         float awb_speed;
13742         awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
13743         LOGD("AWB Speed :%f", awb_speed);
13744         if ( awb_speed < 0 ) {
13745             LOGE("Invalid AWB mode %f!", awb_speed);
13746         } else {
13747             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
13748                     awb_speed)) {
13749                 rc = BAD_VALUE;
13750             }
13751         }
13752     }
13753 
13754     // TNR
13755     if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
13756         frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
13757         uint8_t b_TnrRequested = 0;
13758         uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
13759         cam_denoise_param_t tnr;
13760         tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
13761         tnr.process_plates =
13762             (cam_denoise_process_type_t)frame_settings.find(
13763             QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
13764         b_TnrRequested = tnr.denoise_enable;
13765 
13766         if(b_TnrRequested != curr_tnr_state)
13767            LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
13768 
13769         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
13770             rc = BAD_VALUE;
13771         }
13772     }
13773 
13774     if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
13775         int32_t* exposure_metering_mode =
13776                 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
13777         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
13778                 *exposure_metering_mode)) {
13779             rc = BAD_VALUE;
13780         }
13781     }
13782 
13783     if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
13784         int32_t fwk_testPatternMode =
13785                 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
13786         int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
13787                 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
13788 
13789         if (NAME_NOT_FOUND != testPatternMode) {
13790             cam_test_pattern_data_t testPatternData;
13791             memset(&testPatternData, 0, sizeof(testPatternData));
13792             testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
13793             if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
13794                     frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
13795                 int32_t *fwk_testPatternData =
13796                         frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
13797                 testPatternData.r = fwk_testPatternData[0];
13798                 testPatternData.b = fwk_testPatternData[3];
13799                 switch (gCamCapability[mCameraId]->color_arrangement) {
13800                     case CAM_FILTER_ARRANGEMENT_RGGB:
13801                     case CAM_FILTER_ARRANGEMENT_GRBG:
13802                         testPatternData.gr = fwk_testPatternData[1];
13803                         testPatternData.gb = fwk_testPatternData[2];
13804                         break;
13805                     case CAM_FILTER_ARRANGEMENT_GBRG:
13806                     case CAM_FILTER_ARRANGEMENT_BGGR:
13807                         testPatternData.gr = fwk_testPatternData[2];
13808                         testPatternData.gb = fwk_testPatternData[1];
13809                         break;
13810                     default:
13811                         LOGE("color arrangement %d is not supported",
13812                                 gCamCapability[mCameraId]->color_arrangement);
13813                         break;
13814                 }
13815             }
13816             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
13817                     testPatternData)) {
13818                 rc = BAD_VALUE;
13819             }
13820         } else {
13821             LOGE("Invalid framework sensor test pattern mode %d",
13822                     fwk_testPatternMode);
13823         }
13824     }
13825 
13826     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
13827         size_t count = 0;
13828         camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
13829         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
13830                 gps_coords.data.d, gps_coords.count, count);
13831         if (gps_coords.count != count) {
13832             rc = BAD_VALUE;
13833         }
13834     }
13835 
13836     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
13837         char gps_methods[GPS_PROCESSING_METHOD_SIZE];
13838         size_t count = 0;
13839         const char *gps_methods_src = (const char *)
13840                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
13841         memset(gps_methods, '\0', sizeof(gps_methods));
13842         strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
13843         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
13844                 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
13845         if (GPS_PROCESSING_METHOD_SIZE != count) {
13846             rc = BAD_VALUE;
13847         }
13848     }
13849 
13850     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
13851         int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
13852         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
13853                 gps_timestamp)) {
13854             rc = BAD_VALUE;
13855         }
13856     }
13857 
13858     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
13859         int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
13860         cam_rotation_info_t rotation_info;
13861         if (orientation == 0) {
13862            rotation_info.rotation = ROTATE_0;
13863         } else if (orientation == 90) {
13864            rotation_info.rotation = ROTATE_90;
13865         } else if (orientation == 180) {
13866            rotation_info.rotation = ROTATE_180;
13867         } else if (orientation == 270) {
13868            rotation_info.rotation = ROTATE_270;
13869         }
13870         rotation_info.device_rotation = ROTATE_0;
13871         rotation_info.streamId = snapshotStreamId;
13872         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
13873         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
13874             rc = BAD_VALUE;
13875         }
13876     }
13877 
13878     if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
13879         uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
13880         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
13881             rc = BAD_VALUE;
13882         }
13883     }
13884 
13885     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13886         uint32_t thumb_quality = (uint32_t)
13887                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13888         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13889                 thumb_quality)) {
13890             rc = BAD_VALUE;
13891         }
13892     }
13893 
13894     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13895         cam_dimension_t dim;
13896         dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13897         dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13898         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13899             rc = BAD_VALUE;
13900         }
13901     }
13902 
13903     // Internal metadata
13904     if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13905         size_t count = 0;
13906         camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13907         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13908                 privatedata.data.i32, privatedata.count, count);
13909         if (privatedata.count != count) {
13910             rc = BAD_VALUE;
13911         }
13912     }
13913 
13914     // ISO/Exposure Priority
13915     if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13916         frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13917         cam_priority_mode_t mode =
13918                 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13919         if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13920             cam_intf_parm_manual_3a_t use_iso_exp_pty;
13921             use_iso_exp_pty.previewOnly = FALSE;
13922             uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13923             use_iso_exp_pty.value = *ptr;
13924 
13925             if(CAM_ISO_PRIORITY == mode) {
13926                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13927                         use_iso_exp_pty)) {
13928                     rc = BAD_VALUE;
13929                 }
13930             }
13931             else {
13932                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13933                         use_iso_exp_pty)) {
13934                     rc = BAD_VALUE;
13935                 }
13936             }
13937 
13938             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13939                     rc = BAD_VALUE;
13940             }
13941         }
13942     } else {
13943         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13944             rc = BAD_VALUE;
13945         }
13946     }
13947 
13948     // Saturation
13949     if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13950         int32_t* use_saturation =
13951                 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13952         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13953             rc = BAD_VALUE;
13954         }
13955     }
13956 
13957     // EV step
13958     if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13959             gCamCapability[mCameraId]->exp_compensation_step)) {
13960         rc = BAD_VALUE;
13961     }
13962 
13963     // CDS info
13964     if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13965         cam_cds_data_t *cdsData = (cam_cds_data_t *)
13966                 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13967 
13968         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13969                 CAM_INTF_META_CDS_DATA, *cdsData)) {
13970             rc = BAD_VALUE;
13971         }
13972     }
13973 
13974     // Hybrid AE
13975     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13976         uint8_t *hybrid_ae = (uint8_t *)
13977                 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
13978         // Motion tracking intent isn't compatible with hybrid ae.
13979         if (mCaptureIntent == CAM_INTENT_MOTION_TRACKING) {
13980             *hybrid_ae = 0;
13981         }
13982         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13983             rc = BAD_VALUE;
13984         }
13985     }
13986 
13987     // Motion Detection
13988     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE)) {
13989         uint8_t *motion_detection = (uint8_t *)
13990                 frame_settings.find(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE).data.u8;
13991         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MOTION_DETECTION_ENABLE, *motion_detection)) {
13992             rc = BAD_VALUE;
13993         }
13994     }
13995 
13996     // Histogram
13997     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13998         uint8_t histogramMode =
13999                 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
14000         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
14001                 histogramMode)) {
14002             rc = BAD_VALUE;
14003         }
14004     }
14005 
14006     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
14007         int32_t histogramBins =
14008                  frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
14009         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
14010                 histogramBins)) {
14011             rc = BAD_VALUE;
14012         }
14013     }
14014 
14015     // Tracking AF
14016     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
14017         uint8_t trackingAfTrigger =
14018                 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
14019         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
14020                 trackingAfTrigger)) {
14021             rc = BAD_VALUE;
14022         }
14023     }
14024 
14025     // Makernote
14026     camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
14027     if (entry.count != 0) {
14028         if (entry.count <= MAX_MAKERNOTE_LENGTH) {
14029             cam_makernote_t makernote;
14030             makernote.length = entry.count;
14031             memcpy(makernote.data, entry.data.u8, makernote.length);
14032             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
14033                 rc = BAD_VALUE;
14034             }
14035         } else {
14036             ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
14037                     MAX_MAKERNOTE_LENGTH);
14038             rc = BAD_VALUE;
14039         }
14040     }
14041 
14042     return rc;
14043 }
14044 
14045 /*===========================================================================
14046  * FUNCTION   : captureResultCb
14047  *
14048  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
14049  *
14050  * PARAMETERS :
14051  *   @frame  : frame information from mm-camera-interface
14052  *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
14053  *   @userdata: userdata
14054  *
14055  * RETURN     : NONE
14056  *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer,void * userdata)14057 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
14058                 camera3_stream_buffer_t *buffer,
14059                 uint32_t frame_number, bool isInputBuffer, void *userdata)
14060 {
14061     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
14062     if (hw == NULL) {
14063         LOGE("Invalid hw %p", hw);
14064         return;
14065     }
14066 
14067     hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
14068     return;
14069 }
14070 
14071 /*===========================================================================
14072  * FUNCTION   : setBufferErrorStatus
14073  *
14074  * DESCRIPTION: Callback handler for channels to report any buffer errors
14075  *
14076  * PARAMETERS :
14077  *   @ch     : Channel on which buffer error is reported from
14078  *   @frame_number  : frame number on which buffer error is reported on
14079  *   @buffer_status : buffer error status
14080  *   @userdata: userdata
14081  *
14082  * RETURN     : NONE
14083  *==========================================================================*/
setBufferErrorStatus(QCamera3Channel * ch,uint32_t frame_number,camera3_buffer_status_t err,void * userdata)14084 void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
14085         uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
14086 {
14087     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
14088     if (hw == NULL) {
14089         LOGE("Invalid hw %p", hw);
14090         return;
14091     }
14092 
14093     hw->setBufferErrorStatus(ch, frame_number, err);
14094     return;
14095 }
14096 
setBufferErrorStatus(QCamera3Channel * ch,uint32_t frameNumber,camera3_buffer_status_t err)14097 void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
14098         uint32_t frameNumber, camera3_buffer_status_t err)
14099 {
14100     LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
14101     pthread_mutex_lock(&mMutex);
14102 
14103     for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
14104         if (req.frame_number != frameNumber)
14105             continue;
14106         for (auto& k : req.mPendingBufferList) {
14107             if(k.stream->priv == ch) {
14108                 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
14109             }
14110         }
14111     }
14112 
14113     pthread_mutex_unlock(&mMutex);
14114     return;
14115 }
14116 /*===========================================================================
14117  * FUNCTION   : initialize
14118  *
14119  * DESCRIPTION: Pass framework callback pointers to HAL
14120  *
14121  * PARAMETERS :
14122  *
14123  *
14124  * RETURN     : Success : 0
14125  *              Failure: -ENODEV
14126  *==========================================================================*/
14127 
initialize(const struct camera3_device * device,const camera3_callback_ops_t * callback_ops)14128 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
14129                                   const camera3_callback_ops_t *callback_ops)
14130 {
14131     LOGD("E");
14132     QCamera3HardwareInterface *hw =
14133         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
14134     if (!hw) {
14135         LOGE("NULL camera device");
14136         return -ENODEV;
14137     }
14138 
14139     int rc = hw->initialize(callback_ops);
14140     LOGD("X");
14141     return rc;
14142 }
14143 
14144 /*===========================================================================
14145  * FUNCTION   : configure_streams
14146  *
14147  * DESCRIPTION:
14148  *
14149  * PARAMETERS :
14150  *
14151  *
14152  * RETURN     : Success: 0
14153  *              Failure: -EINVAL (if stream configuration is invalid)
14154  *                       -ENODEV (fatal error)
14155  *==========================================================================*/
14156 
configure_streams(const struct camera3_device * device,camera3_stream_configuration_t * stream_list)14157 int QCamera3HardwareInterface::configure_streams(
14158         const struct camera3_device *device,
14159         camera3_stream_configuration_t *stream_list)
14160 {
14161     LOGD("E");
14162     QCamera3HardwareInterface *hw =
14163         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
14164     if (!hw) {
14165         LOGE("NULL camera device");
14166         return -ENODEV;
14167     }
14168     int rc = hw->configureStreams(stream_list);
14169     LOGD("X");
14170     return rc;
14171 }
14172 
14173 /*===========================================================================
14174  * FUNCTION   : construct_default_request_settings
14175  *
14176  * DESCRIPTION: Configure a settings buffer to meet the required use case
14177  *
14178  * PARAMETERS :
14179  *
14180  *
14181  * RETURN     : Success: Return valid metadata
14182  *              Failure: Return NULL
14183  *==========================================================================*/
14184 const camera_metadata_t* QCamera3HardwareInterface::
construct_default_request_settings(const struct camera3_device * device,int type)14185     construct_default_request_settings(const struct camera3_device *device,
14186                                         int type)
14187 {
14188 
14189     LOGD("E");
14190     camera_metadata_t* fwk_metadata = NULL;
14191     QCamera3HardwareInterface *hw =
14192         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
14193     if (!hw) {
14194         LOGE("NULL camera device");
14195         return NULL;
14196     }
14197 
14198     fwk_metadata = hw->translateCapabilityToMetadata(type);
14199 
14200     LOGD("X");
14201     return fwk_metadata;
14202 }
14203 
14204 /*===========================================================================
14205  * FUNCTION   : process_capture_request
14206  *
14207  * DESCRIPTION:
14208  *
14209  * PARAMETERS :
14210  *
14211  *
14212  * RETURN     :
14213  *==========================================================================*/
process_capture_request(const struct camera3_device * device,camera3_capture_request_t * request)14214 int QCamera3HardwareInterface::process_capture_request(
14215                     const struct camera3_device *device,
14216                     camera3_capture_request_t *request)
14217 {
14218     LOGD("E");
14219     CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
14220     QCamera3HardwareInterface *hw =
14221         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
14222     if (!hw) {
14223         LOGE("NULL camera device");
14224         return -EINVAL;
14225     }
14226 
14227     int rc = hw->orchestrateRequest(request);
14228     LOGD("X");
14229     return rc;
14230 }
14231 
14232 /*===========================================================================
14233  * FUNCTION   : dump
14234  *
14235  * DESCRIPTION:
14236  *
14237  * PARAMETERS :
14238  *
14239  *
14240  * RETURN     :
14241  *==========================================================================*/
14242 
dump(const struct camera3_device * device,int fd)14243 void QCamera3HardwareInterface::dump(
14244                 const struct camera3_device *device, int fd)
14245 {
14246     /* Log level property is read when "adb shell dumpsys media.camera" is
14247        called so that the log level can be controlled without restarting
14248        the media server */
14249     getLogLevel();
14250 
14251     LOGD("E");
14252     QCamera3HardwareInterface *hw =
14253         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
14254     if (!hw) {
14255         LOGE("NULL camera device");
14256         return;
14257     }
14258 
14259     hw->dump(fd);
14260     LOGD("X");
14261     return;
14262 }
14263 
14264 /*===========================================================================
14265  * FUNCTION   : flush
14266  *
14267  * DESCRIPTION:
14268  *
14269  * PARAMETERS :
14270  *
14271  *
14272  * RETURN     :
14273  *==========================================================================*/
14274 
flush(const struct camera3_device * device)14275 int QCamera3HardwareInterface::flush(
14276                 const struct camera3_device *device)
14277 {
14278     int rc;
14279     LOGD("E");
14280     QCamera3HardwareInterface *hw =
14281         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
14282     if (!hw) {
14283         LOGE("NULL camera device");
14284         return -EINVAL;
14285     }
14286 
14287     pthread_mutex_lock(&hw->mMutex);
14288     // Validate current state
14289     switch (hw->mState) {
14290         case STARTED:
14291             /* valid state */
14292             break;
14293 
14294         case ERROR:
14295             pthread_mutex_unlock(&hw->mMutex);
14296             hw->handleCameraDeviceError();
14297             return -ENODEV;
14298 
14299         default:
14300             LOGI("Flush returned during state %d", hw->mState);
14301             pthread_mutex_unlock(&hw->mMutex);
14302             return 0;
14303     }
14304     pthread_mutex_unlock(&hw->mMutex);
14305 
14306     rc = hw->flush(true /* restart channels */ );
14307     LOGD("X");
14308     return rc;
14309 }
14310 
14311 /*===========================================================================
14312  * FUNCTION   : close_camera_device
14313  *
14314  * DESCRIPTION:
14315  *
14316  * PARAMETERS :
14317  *
14318  *
14319  * RETURN     :
14320  *==========================================================================*/
close_camera_device(struct hw_device_t * device)14321 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
14322 {
14323     int ret = NO_ERROR;
14324     QCamera3HardwareInterface *hw =
14325         reinterpret_cast<QCamera3HardwareInterface *>(
14326             reinterpret_cast<camera3_device_t *>(device)->priv);
14327     if (!hw) {
14328         LOGE("NULL camera device");
14329         return BAD_VALUE;
14330     }
14331 
14332     LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
14333     delete hw;
14334     LOGI("[KPI Perf]: X");
14335     CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
14336     return ret;
14337 }
14338 
14339 /*===========================================================================
14340  * FUNCTION   : getWaveletDenoiseProcessPlate
14341  *
14342  * DESCRIPTION: query wavelet denoise process plate
14343  *
14344  * PARAMETERS : None
14345  *
14346  * RETURN     : WNR process plate value
14347  *==========================================================================*/
getWaveletDenoiseProcessPlate()14348 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
14349 {
14350     char prop[PROPERTY_VALUE_MAX];
14351     memset(prop, 0, sizeof(prop));
14352     property_get("persist.denoise.process.plates", prop, "0");
14353     int processPlate = atoi(prop);
14354     switch(processPlate) {
14355     case 0:
14356         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
14357     case 1:
14358         return CAM_WAVELET_DENOISE_CBCR_ONLY;
14359     case 2:
14360         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
14361     case 3:
14362         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
14363     default:
14364         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
14365     }
14366 }
14367 
14368 
14369 /*===========================================================================
14370  * FUNCTION   : getTemporalDenoiseProcessPlate
14371  *
14372  * DESCRIPTION: query temporal denoise process plate
14373  *
14374  * PARAMETERS : None
14375  *
14376  * RETURN     : TNR process plate value
14377  *==========================================================================*/
getTemporalDenoiseProcessPlate()14378 cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
14379 {
14380     char prop[PROPERTY_VALUE_MAX];
14381     memset(prop, 0, sizeof(prop));
14382     property_get("persist.tnr.process.plates", prop, "0");
14383     int processPlate = atoi(prop);
14384     switch(processPlate) {
14385     case 0:
14386         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
14387     case 1:
14388         return CAM_WAVELET_DENOISE_CBCR_ONLY;
14389     case 2:
14390         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
14391     case 3:
14392         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
14393     default:
14394         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
14395     }
14396 }
14397 
14398 
14399 /*===========================================================================
14400  * FUNCTION   : extractSceneMode
14401  *
14402  * DESCRIPTION: Extract scene mode from frameworks set metadata
14403  *
14404  * PARAMETERS :
14405  *      @frame_settings: CameraMetadata reference
14406  *      @metaMode: ANDROID_CONTROL_MODE
14407  *      @hal_metadata: hal metadata structure
14408  *
14409  * RETURN     : None
14410  *==========================================================================*/
extractSceneMode(const CameraMetadata & frame_settings,uint8_t metaMode,metadata_buffer_t * hal_metadata)14411 int32_t QCamera3HardwareInterface::extractSceneMode(
14412         const CameraMetadata &frame_settings, uint8_t metaMode,
14413         metadata_buffer_t *hal_metadata)
14414 {
14415     int32_t rc = NO_ERROR;
14416     uint8_t sceneMode = CAM_SCENE_MODE_OFF;
14417 
14418     if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
14419         LOGD("Ignoring control mode OFF_KEEP_STATE");
14420         return NO_ERROR;
14421     }
14422 
14423     if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
14424         camera_metadata_ro_entry entry =
14425                 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
14426         if (0 == entry.count)
14427             return rc;
14428 
14429         uint8_t fwk_sceneMode = entry.data.u8[0];
14430 
14431         int val = lookupHalName(SCENE_MODES_MAP,
14432                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
14433                 fwk_sceneMode);
14434         if (NAME_NOT_FOUND != val) {
14435             sceneMode = (uint8_t)val;
14436             LOGD("sceneMode: %d", sceneMode);
14437         }
14438     }
14439 
14440     if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
14441         rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
14442     }
14443 
14444     if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
14445         if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
14446             cam_hdr_param_t hdr_params;
14447             hdr_params.hdr_enable = 1;
14448             hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
14449             hdr_params.hdr_need_1x = false;
14450             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
14451                     CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
14452                 rc = BAD_VALUE;
14453             }
14454         }
14455 
14456         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
14457                 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
14458             rc = BAD_VALUE;
14459         }
14460     }
14461 
14462     if (mForceHdrSnapshot) {
14463         cam_hdr_param_t hdr_params;
14464         hdr_params.hdr_enable = 1;
14465         hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
14466         hdr_params.hdr_need_1x = false;
14467         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
14468                 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
14469             rc = BAD_VALUE;
14470         }
14471     }
14472 
14473     return rc;
14474 }
14475 
14476 /*===========================================================================
14477  * FUNCTION   : setVideoHdrMode
14478  *
14479  * DESCRIPTION: Set Video HDR mode from frameworks set metadata
14480  *
14481  * PARAMETERS :
14482  *      @hal_metadata: hal metadata structure
14483  *      @metaMode: QCAMERA3_VIDEO_HDR_MODE
14484  *
14485  * RETURN     : None
14486  *==========================================================================*/
setVideoHdrMode(metadata_buffer_t * hal_metadata,cam_video_hdr_mode_t vhdr)14487 int32_t QCamera3HardwareInterface::setVideoHdrMode(
14488         metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
14489 {
14490     if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
14491         return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
14492     }
14493 
14494     LOGE("Invalid Video HDR mode %d!", vhdr);
14495     return BAD_VALUE;
14496 }
14497 
14498 /*===========================================================================
14499  * FUNCTION   : setSensorHDR
14500  *
14501  * DESCRIPTION: Enable/disable sensor HDR.
14502  *
14503  * PARAMETERS :
14504  *      @hal_metadata: hal metadata structure
14505  *      @enable: boolean whether to enable/disable sensor HDR
14506  *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              BAD_VALUE -- unsupported HDR mode or batching failure
14508  *==========================================================================*/
setSensorHDR(metadata_buffer_t * hal_metadata,bool enable,bool isVideoHdrEnable)14509 int32_t QCamera3HardwareInterface::setSensorHDR(
14510         metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
14511 {
14512     int32_t rc = NO_ERROR;
14513     cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
14514 
14515     if (enable) {
14516         char sensor_hdr_prop[PROPERTY_VALUE_MAX];
14517         memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
14518         #ifdef _LE_CAMERA_
14519         //Default to staggered HDR for IOT
14520         property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
14521         #else
14522         property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
14523         #endif
14524         sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
14525     }
14526 
14527     bool isSupported = false;
14528     switch (sensor_hdr) {
14529         case CAM_SENSOR_HDR_IN_SENSOR:
14530             if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
14531                     CAM_QCOM_FEATURE_SENSOR_HDR) {
14532                 isSupported = true;
14533                 LOGD("Setting HDR mode In Sensor");
14534             }
14535             break;
14536         case CAM_SENSOR_HDR_ZIGZAG:
14537             if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
14538                     CAM_QCOM_FEATURE_ZIGZAG_HDR) {
14539                 isSupported = true;
14540                 LOGD("Setting HDR mode Zigzag");
14541             }
14542             break;
14543         case CAM_SENSOR_HDR_STAGGERED:
14544             if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
14545                     CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
14546                 isSupported = true;
14547                 LOGD("Setting HDR mode Staggered");
14548             }
14549             break;
14550         case CAM_SENSOR_HDR_OFF:
14551             isSupported = true;
14552             LOGD("Turning off sensor HDR");
14553             break;
14554         default:
14555             LOGE("HDR mode %d not supported", sensor_hdr);
14556             rc = BAD_VALUE;
14557             break;
14558     }
14559 
14560     if(isSupported) {
14561         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
14562                 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
14563             rc = BAD_VALUE;
14564         } else {
14565             if(!isVideoHdrEnable)
14566                 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
14567         }
14568     }
14569     return rc;
14570 }
14571 
14572 /*===========================================================================
14573  * FUNCTION   : needRotationReprocess
14574  *
14575  * DESCRIPTION: if rotation needs to be done by reprocess in pp
14576  *
14577  * PARAMETERS : none
14578  *
14579  * RETURN     : true: needed
14580  *              false: no need
14581  *==========================================================================*/
needRotationReprocess()14582 bool QCamera3HardwareInterface::needRotationReprocess()
14583 {
14584     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
14585         // current rotation is not zero, and pp has the capability to process rotation
14586         LOGH("need do reprocess for rotation");
14587         return true;
14588     }
14589 
14590     return false;
14591 }
14592 
14593 /*===========================================================================
14594  * FUNCTION   : needReprocess
14595  *
14596  * DESCRIPTION: if reprocess in needed
14597  *
14598  * PARAMETERS : none
14599  *
14600  * RETURN     : true: needed
14601  *              false: no need
14602  *==========================================================================*/
needReprocess(cam_feature_mask_t postprocess_mask)14603 bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
14604 {
14605     if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
14606         // TODO: add for ZSL HDR later
14607         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
14608         if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
14609             LOGH("need do reprocess for ZSL WNR or min PP reprocess");
14610             return true;
14611         } else {
14612             LOGH("already post processed frame");
14613             return false;
14614         }
14615     }
14616     return needRotationReprocess();
14617 }
14618 
14619 /*===========================================================================
14620  * FUNCTION   : needJpegExifRotation
14621  *
14622  * DESCRIPTION: if rotation from jpeg is needed
14623  *
14624  * PARAMETERS : none
14625  *
14626  * RETURN     : true: needed
14627  *              false: no need
14628  *==========================================================================*/
needJpegExifRotation()14629 bool QCamera3HardwareInterface::needJpegExifRotation()
14630 {
14631     /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
14632     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
14633        LOGD("Need use Jpeg EXIF Rotation");
14634        return true;
14635     }
14636     return false;
14637 }
14638 
14639 /*===========================================================================
14640  * FUNCTION   : addOfflineReprocChannel
14641  *
14642  * DESCRIPTION: add a reprocess channel that will do reprocess on frames
14643  *              coming from input channel
14644  *
14645  * PARAMETERS :
14646  *   @config  : reprocess configuration
14647  *   @inputChHandle : pointer to the input (source) channel
14648  *
14649  *
14650  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
14651  *==========================================================================*/
addOfflineReprocChannel(const reprocess_config_t & config,QCamera3ProcessingChannel * inputChHandle)14652 QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
14653         const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
14654 {
14655     int32_t rc = NO_ERROR;
14656     QCamera3ReprocessChannel *pChannel = NULL;
14657 
14658     pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
14659             mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
14660             config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
14661     if (NULL == pChannel) {
14662         LOGE("no mem for reprocess channel");
14663         return NULL;
14664     }
14665 
14666     rc = pChannel->initialize(IS_TYPE_NONE);
14667     if (rc != NO_ERROR) {
14668         LOGE("init reprocess channel failed, ret = %d", rc);
14669         delete pChannel;
14670         return NULL;
14671     }
14672 
14673     // pp feature config
14674     cam_pp_feature_config_t pp_config;
14675     memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
14676 
14677     pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
14678     if (gCamCapability[mCameraId]->qcom_supported_feature_mask
14679             & CAM_QCOM_FEATURE_DSDN) {
14680         //Use CPP CDS incase h/w supports it.
14681         pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
14682         pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
14683     }
14684     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
14685         pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
14686     }
14687 
14688     if (config.hdr_param.hdr_enable) {
14689         pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
14690         pp_config.hdr_param = config.hdr_param;
14691     }
14692 
14693     if (mForceHdrSnapshot) {
14694         pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
14695         pp_config.hdr_param.hdr_enable = 1;
14696         pp_config.hdr_param.hdr_need_1x = 0;
14697         pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
14698     }
14699 
14700     rc = pChannel->addReprocStreamsFromSource(pp_config,
14701             config,
14702             IS_TYPE_NONE,
14703             mMetadataChannel);
14704 
14705     if (rc != NO_ERROR) {
14706         delete pChannel;
14707         return NULL;
14708     }
14709     return pChannel;
14710 }
14711 
14712 /*===========================================================================
14713  * FUNCTION   : getMobicatMask
14714  *
14715  * DESCRIPTION: returns mobicat mask
14716  *
14717  * PARAMETERS : none
14718  *
14719  * RETURN     : mobicat mask
14720  *
14721  *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Simple accessor for the Mobicat (tuning-metadata) mask set elsewhere.
    return m_MobicatMask;
}
14726 
14727 /*===========================================================================
14728  * FUNCTION   : setMobicat
14729  *
14730  * DESCRIPTION: set Mobicat on/off.
14731  *
14732  * PARAMETERS :
14733  *   @params  : none
14734  *
14735  * RETURN     : int32_t type of status
14736  *              NO_ERROR  -- success
14737  *              none-zero failure code
14738  *==========================================================================*/
setMobicat()14739 int32_t QCamera3HardwareInterface::setMobicat()
14740 {
14741     int32_t ret = NO_ERROR;
14742 
14743     if (m_MobicatMask) {
14744         tune_cmd_t tune_cmd;
14745         tune_cmd.type = SET_RELOAD_CHROMATIX;
14746         tune_cmd.module = MODULE_ALL;
14747         tune_cmd.value = TRUE;
14748         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14749                 CAM_INTF_PARM_SET_VFE_COMMAND,
14750                 tune_cmd);
14751 
14752         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14753                 CAM_INTF_PARM_SET_PP_COMMAND,
14754                 tune_cmd);
14755     }
14756 
14757     return ret;
14758 }
14759 
14760 /*===========================================================================
14761 * FUNCTION   : getLogLevel
14762 *
14763 * DESCRIPTION: Reads the log level property into a variable
14764 *
14765 * PARAMETERS :
14766 *   None
14767 *
14768 * RETURN     :
14769 *   None
14770 *==========================================================================*/
getLogLevel()14771 void QCamera3HardwareInterface::getLogLevel()
14772 {
14773     char prop[PROPERTY_VALUE_MAX];
14774     uint32_t globalLogLevel = 0;
14775 
14776     property_get("persist.camera.hal.debug", prop, "0");
14777     int val = atoi(prop);
14778     if (0 <= val) {
14779         gCamHal3LogLevel = (uint32_t)val;
14780     }
14781 
14782     property_get("persist.camera.kpi.debug", prop, "0");
14783     gKpiDebugLevel = atoi(prop);
14784 
14785     property_get("persist.camera.global.debug", prop, "0");
14786     val = atoi(prop);
14787     if (0 <= val) {
14788         globalLogLevel = (uint32_t)val;
14789     }
14790 
14791     /* Highest log level among hal.logs and global.logs is selected */
14792     if (gCamHal3LogLevel < globalLogLevel)
14793         gCamHal3LogLevel = globalLogLevel;
14794 
14795     return;
14796 }
14797 
14798 /*===========================================================================
14799  * FUNCTION   : validateStreamRotations
14800  *
14801  * DESCRIPTION: Check if the rotations requested are supported
14802  *
14803  * PARAMETERS :
14804  *   @stream_list : streams to be configured
14805  *
14806  * RETURN     : NO_ERROR on success
14807  *              -EINVAL on failure
14808  *
14809  *==========================================================================*/
validateStreamRotations(camera3_stream_configuration_t * streamList)14810 int QCamera3HardwareInterface::validateStreamRotations(
14811         camera3_stream_configuration_t *streamList)
14812 {
14813     int rc = NO_ERROR;
14814 
14815     /*
14816     * Loop through all streams requested in configuration
14817     * Check if unsupported rotations have been requested on any of them
14818     */
14819     for (size_t j = 0; j < streamList->num_streams; j++){
14820         camera3_stream_t *newStream = streamList->streams[j];
14821 
14822         switch(newStream->rotation) {
14823             case CAMERA3_STREAM_ROTATION_0:
14824             case CAMERA3_STREAM_ROTATION_90:
14825             case CAMERA3_STREAM_ROTATION_180:
14826             case CAMERA3_STREAM_ROTATION_270:
14827                 //Expected values
14828                 break;
14829             default:
14830                 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
14831                         "type:%d and stream format:%d", __func__,
14832                         newStream->rotation, newStream->stream_type,
14833                         newStream->format);
14834                 return -EINVAL;
14835         }
14836 
14837         bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
14838         bool isImplDef = (newStream->format ==
14839                 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
14840         bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
14841                 isImplDef);
14842 
14843         if (isRotated && (!isImplDef || isZsl)) {
14844             LOGE("Error: Unsupported rotation of %d requested for stream"
14845                     "type:%d and stream format:%d",
14846                     newStream->rotation, newStream->stream_type,
14847                     newStream->format);
14848             rc = -EINVAL;
14849             break;
14850         }
14851     }
14852 
14853     return rc;
14854 }
14855 
14856 /*===========================================================================
14857 * FUNCTION   : getFlashInfo
14858 *
14859 * DESCRIPTION: Retrieve information about whether the device has a flash.
14860 *
14861 * PARAMETERS :
14862 *   @cameraId  : Camera id to query
14863 *   @hasFlash  : Boolean indicating whether there is a flash device
14864 *                associated with given camera
14865 *   @flashNode : If a flash device exists, this will be its device node.
14866 *
14867 * RETURN     :
14868 *   None
14869 *==========================================================================*/
getFlashInfo(const int cameraId,bool & hasFlash,char (& flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])14870 void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
14871         bool& hasFlash,
14872         char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
14873 {
14874     cam_capability_t* camCapability = gCamCapability[cameraId];
14875     if (NULL == camCapability) {
14876         hasFlash = false;
14877         flashNode[0] = '\0';
14878     } else {
14879         hasFlash = camCapability->flash_available;
14880         strlcpy(flashNode,
14881                 (char*)camCapability->flash_dev_name,
14882                 QCAMERA_MAX_FILEPATH_LENGTH);
14883     }
14884 }
14885 
14886 /*===========================================================================
14887 * FUNCTION   : getEepromVersionInfo
14888 *
14889 * DESCRIPTION: Retrieve version info of the sensor EEPROM data
14890 *
14891 * PARAMETERS : None
14892 *
14893 * RETURN     : string describing EEPROM version
14894 *              "\0" if no such info available
14895 *==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Returns the EEPROM version string from the static capability table
    // for this camera ("\0" if no such info is available, per the header).
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
14900 
14901 /*===========================================================================
14902 * FUNCTION   : getLdafCalib
14903 *
14904 * DESCRIPTION: Retrieve Laser AF calibration data
14905 *
14906 * PARAMETERS : None
14907 *
14908 * RETURN     : Two uint32_t describing laser AF calibration data
14909 *              NULL if none is available.
14910 *==========================================================================*/
getLdafCalib()14911 const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14912 {
14913     if (mLdafCalibExist) {
14914         return &mLdafCalib[0];
14915     } else {
14916         return NULL;
14917     }
14918 }
14919 
14920 /*===========================================================================
14921 * FUNCTION   : getEaselFwVersion
14922 *
14923 * DESCRIPTION: Retrieve Easel firmware version
14924 *
14925 * PARAMETERS : None
14926 *
14927 * RETURN     : string describing Firmware version
14928 *              "\0" if version is not up to date
14929 *==========================================================================*/
getEaselFwVersion()14930 const char *QCamera3HardwareInterface::getEaselFwVersion()
14931 {
14932     if (mEaselFwUpdated) {
14933         return (const char *)&mEaselFwVersion[0];
14934     } else {
14935         return NULL;
14936     }
14937 }
14938 
14939 /*===========================================================================
14940  * FUNCTION   : dynamicUpdateMetaStreamInfo
14941  *
14942  * DESCRIPTION: This function:
14943  *             (1) stops all the channels
14944  *             (2) returns error on pending requests and buffers
14945  *             (3) sends metastream_info in setparams
14946  *             (4) starts all channels
14947  *             This is useful when sensor has to be restarted to apply any
14948  *             settings such as frame rate from a different sensor mode
14949  *
14950  * PARAMETERS : None
14951  *
14952  * RETURN     : NO_ERROR on success
14953  *              Error codes on failure
14954  *
14955  *==========================================================================*/
dynamicUpdateMetaStreamInfo()14956 int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14957 {
14958     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
14959     int rc = NO_ERROR;
14960 
14961     LOGD("E");
14962 
14963     rc = stopAllChannels();
14964     if (rc < 0) {
14965         LOGE("stopAllChannels failed");
14966         return rc;
14967     }
14968 
14969     rc = notifyErrorForPendingRequests();
14970     if (rc < 0) {
14971         LOGE("notifyErrorForPendingRequests failed");
14972         return rc;
14973     }
14974 
14975     for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14976         LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
14977                 "Format:%d",
14978                 mStreamConfigInfo.type[i],
14979                 mStreamConfigInfo.stream_sizes[i].width,
14980                 mStreamConfigInfo.stream_sizes[i].height,
14981                 mStreamConfigInfo.postprocess_mask[i],
14982                 mStreamConfigInfo.format[i]);
14983     }
14984 
14985     /* Send meta stream info once again so that ISP can start */
14986     ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14987             CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14988     rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14989             mParameters);
14990     if (rc < 0) {
14991         LOGE("set Metastreaminfo failed. Sensor mode does not change");
14992     }
14993 
14994     rc = startAllChannels();
14995     if (rc < 0) {
14996         LOGE("startAllChannels failed");
14997         return rc;
14998     }
14999 
15000     LOGD("X");
15001     return rc;
15002 }
15003 
15004 /*===========================================================================
15005  * FUNCTION   : stopAllChannels
15006  *
15007  * DESCRIPTION: This function stops (equivalent to stream-off) all channels
15008  *
15009  * PARAMETERS : None
15010  *
15011  * RETURN     : NO_ERROR on success
15012  *              Error codes on failure
15013  *
15014  *==========================================================================*/
stopAllChannels()15015 int32_t QCamera3HardwareInterface::stopAllChannels()
15016 {
15017     int32_t rc = NO_ERROR;
15018 
15019     LOGD("Stopping all channels");
15020     // Stop the Streams/Channels
15021     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
15022         it != mStreamInfo.end(); it++) {
15023         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
15024         if (channel) {
15025             channel->stop();
15026         }
15027         (*it)->status = INVALID;
15028     }
15029 
15030     if (mSupportChannel) {
15031         mSupportChannel->stop();
15032     }
15033     if (mAnalysisChannel) {
15034         mAnalysisChannel->stop();
15035     }
15036     if (mRawDumpChannel) {
15037         mRawDumpChannel->stop();
15038     }
15039     if (mHdrPlusRawSrcChannel) {
15040         mHdrPlusRawSrcChannel->stop();
15041     }
15042     if (mMetadataChannel) {
15043         /* If content of mStreamInfo is not 0, there is metadata stream */
15044         mMetadataChannel->stop();
15045     }
15046 
15047     LOGD("All channels stopped");
15048     return rc;
15049 }
15050 
15051 /*===========================================================================
15052  * FUNCTION   : startAllChannels
15053  *
15054  * DESCRIPTION: This function starts (equivalent to stream-on) all channels
15055  *
15056  * PARAMETERS : None
15057  *
15058  * RETURN     : NO_ERROR on success
15059  *              Error codes on failure
15060  *
15061  *==========================================================================*/
startAllChannels()15062 int32_t QCamera3HardwareInterface::startAllChannels()
15063 {
15064     int32_t rc = NO_ERROR;
15065 
15066     LOGD("Start all channels ");
15067     // Start the Streams/Channels
15068     if (mMetadataChannel) {
15069         /* If content of mStreamInfo is not 0, there is metadata stream */
15070         rc = mMetadataChannel->start();
15071         if (rc < 0) {
15072             LOGE("META channel start failed");
15073             return rc;
15074         }
15075     }
15076     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
15077         it != mStreamInfo.end(); it++) {
15078         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
15079         if (channel) {
15080             rc = channel->start();
15081             if (rc < 0) {
15082                 LOGE("channel start failed");
15083                 return rc;
15084             }
15085         }
15086     }
15087     if (mAnalysisChannel) {
15088         mAnalysisChannel->start();
15089     }
15090     if (mSupportChannel) {
15091         rc = mSupportChannel->start();
15092         if (rc < 0) {
15093             LOGE("Support channel start failed");
15094             return rc;
15095         }
15096     }
15097     if (mRawDumpChannel) {
15098         rc = mRawDumpChannel->start();
15099         if (rc < 0) {
15100             LOGE("RAW dump channel start failed");
15101             return rc;
15102         }
15103     }
15104     if (mHdrPlusRawSrcChannel) {
15105         rc = mHdrPlusRawSrcChannel->start();
15106         if (rc < 0) {
15107             LOGE("HDR+ RAW channel start failed");
15108             return rc;
15109         }
15110     }
15111 
15112     LOGD("All channels started");
15113     return rc;
15114 }
15115 
15116 /*===========================================================================
15117  * FUNCTION   : notifyErrorForPendingRequests
15118  *
15119  * DESCRIPTION: This function sends error for all the pending requests/buffers
15120  *
15121  * PARAMETERS : None
15122  *
15123  * RETURN     : Error codes
15124  *              NO_ERROR on success
15125  *
15126  *==========================================================================*/
// Walks the pending-request and pending-buffer lists in lockstep (both are
// ordered by frame number) and emits the appropriate camera3 error
// notification for each frame, returning all outstanding buffers with
// error status. Finally clears all pending-tracking state.
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    notifyErrorFoPendingDepthData(mDepthChannel);

    auto pendingRequest = mPendingRequestsList.begin();
    auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();

    // Iterate through pending requests (for which result metadata isn't sent yet) and pending
    // buffers (for which buffers aren't sent yet).
    while (pendingRequest != mPendingRequestsList.end() ||
           pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingRequest == mPendingRequestsList.end() ||
            pendingBuffer->frame_number < pendingRequest->frame_number) {
            // If metadata for this frame was sent, notify about a buffer error and returns buffers
            // with error.
            for (auto &info : pendingBuffer->mPendingBufferList) {
                // Send a buffer error for this frame number.
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info.stream;
                notify_msg.message.error.frame_number = pendingBuffer->frame_number;
                orchestrateNotify(&notify_msg);

                // Return the buffer itself with error status through the
                // dispatcher so ordering guarantees are preserved.
                camera3_stream_buffer_t buffer = {};
                buffer.acquire_fence = -1;
                buffer.release_fence = -1;
                buffer.buffer = info.buffer;
                buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
                buffer.stream = info.stream;
                mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
            }

            pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
        } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
                   pendingBuffer->frame_number > pendingRequest->frame_number) {
            // If the buffers for this frame were sent already, notify about a result error.
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
            notify_msg.message.error.error_stream = nullptr;
            notify_msg.message.error.frame_number = pendingRequest->frame_number;
            orchestrateNotify(&notify_msg);

            // A reprocess request must still hand its input buffer back.
            if (pendingRequest->input_buffer != nullptr) {
                camera3_capture_result result = {};
                result.frame_number = pendingRequest->frame_number;
                result.result = nullptr;
                result.input_buffer = pendingRequest->input_buffer;
                orchestrateResult(&result);
            }

            mShutterDispatcher.clear(pendingRequest->frame_number);
            pendingRequest = mPendingRequestsList.erase(pendingRequest);
        } else {
            // If both buffers and result metadata weren't sent yet, notify about a request error
            // and return buffers with error.
            for (auto &info : pendingBuffer->mPendingBufferList) {
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
                notify_msg.message.error.error_stream = info.stream;
                notify_msg.message.error.frame_number = pendingBuffer->frame_number;
                orchestrateNotify(&notify_msg);

                camera3_stream_buffer_t buffer = {};
                buffer.acquire_fence = -1;
                buffer.release_fence = -1;
                buffer.buffer = info.buffer;
                buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
                buffer.stream = info.stream;
                mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
            }

            if (pendingRequest->input_buffer != nullptr) {
                camera3_capture_result result = {};
                result.frame_number = pendingRequest->frame_number;
                result.result = nullptr;
                result.input_buffer = pendingRequest->input_buffer;
                orchestrateResult(&result);
            }

            mShutterDispatcher.clear(pendingRequest->frame_number);
            // Frame matched in both lists: advance both iterators.
            pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
            pendingRequest = mPendingRequestsList.erase(pendingRequest);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();
    mShutterDispatcher.clear();
    mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mExpectedFrameDuration = 0;
    mExpectedInflightDuration = 0;
    LOGH("Cleared all the pending buffers ");

    return NO_ERROR;
}
15229 
isOnEncoder(const cam_dimension_t max_viewfinder_size,uint32_t width,uint32_t height)15230 bool QCamera3HardwareInterface::isOnEncoder(
15231         const cam_dimension_t max_viewfinder_size,
15232         uint32_t width, uint32_t height)
15233 {
15234     return ((width > (uint32_t)max_viewfinder_size.width) ||
15235             (height > (uint32_t)max_viewfinder_size.height) ||
15236             (width > (uint32_t)VIDEO_4K_WIDTH) ||
15237             (height > (uint32_t)VIDEO_4K_HEIGHT));
15238 }
15239 
15240 /*===========================================================================
15241  * FUNCTION   : setBundleInfo
15242  *
15243  * DESCRIPTION: Set bundle info for all streams that are bundle.
15244  *
15245  * PARAMETERS : None
15246  *
15247  * RETURN     : NO_ERROR on success
15248  *              Error codes on failure
15249  *==========================================================================*/
setBundleInfo()15250 int32_t QCamera3HardwareInterface::setBundleInfo()
15251 {
15252     int32_t rc = NO_ERROR;
15253 
15254     if (mChannelHandle) {
15255         cam_bundle_config_t bundleInfo;
15256         memset(&bundleInfo, 0, sizeof(bundleInfo));
15257         rc = mCameraHandle->ops->get_bundle_info(
15258                 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
15259         if (rc != NO_ERROR) {
15260             LOGE("get_bundle_info failed");
15261             return rc;
15262         }
15263         if (mAnalysisChannel) {
15264             mAnalysisChannel->setBundleInfo(bundleInfo);
15265         }
15266         if (mSupportChannel) {
15267             mSupportChannel->setBundleInfo(bundleInfo);
15268         }
15269         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
15270                 it != mStreamInfo.end(); it++) {
15271             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
15272             channel->setBundleInfo(bundleInfo);
15273         }
15274         if (mRawDumpChannel) {
15275             mRawDumpChannel->setBundleInfo(bundleInfo);
15276         }
15277         if (mHdrPlusRawSrcChannel) {
15278             mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
15279         }
15280     }
15281 
15282     return rc;
15283 }
15284 
15285 /*===========================================================================
15286  * FUNCTION   : setInstantAEC
15287  *
15288  * DESCRIPTION: Set Instant AEC related params.
15289  *
15290  * PARAMETERS :
15291  *      @meta: CameraMetadata reference
15292  *
15293  * RETURN     : NO_ERROR on success
15294  *              Error codes on failure
15295  *==========================================================================*/
setInstantAEC(const CameraMetadata & meta)15296 int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
15297 {
15298     int32_t rc = NO_ERROR;
15299     uint8_t val = 0;
15300     char prop[PROPERTY_VALUE_MAX];
15301 
15302     // First try to configure instant AEC from framework metadata
15303     if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
15304         val = meta.find(QCAMERA3_INSTANT_AEC_MODE).data.u8[0];
15305         LOGE("Instant AEC mode set: %d", val);
15306     }
15307 
15308     // If framework did not set this value, try to read from set prop.
15309     if (val == 0) {
15310         memset(prop, 0, sizeof(prop));
15311         property_get("persist.camera.instant.aec", prop, "0");
15312         val = (uint8_t)atoi(prop);
15313     }
15314 
15315     if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
15316            ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
15317         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
15318         mInstantAEC = val;
15319         mInstantAECSettledFrameNumber = 0;
15320         mInstantAecFrameIdxCount = 0;
15321         LOGH("instantAEC value set %d",val);
15322         if (mInstantAEC) {
15323             memset(prop, 0, sizeof(prop));
15324             property_get("persist.camera.ae.instant.bound", prop, "10");
15325             int32_t aec_frame_skip_cnt = atoi(prop);
15326             if (aec_frame_skip_cnt >= 0) {
15327                 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
15328             } else {
15329                 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
15330                 rc = BAD_VALUE;
15331             }
15332         }
15333     } else {
15334         LOGE("Bad instant aec value set %d", val);
15335         rc = BAD_VALUE;
15336     }
15337     return rc;
15338 }
15339 
15340 /*===========================================================================
15341  * FUNCTION   : get_num_overall_buffers
15342  *
15343  * DESCRIPTION: Estimate number of pending buffers across all requests.
15344  *
15345  * PARAMETERS : None
15346  *
15347  * RETURN     : Number of overall pending buffers
15348  *
15349  *==========================================================================*/
get_num_overall_buffers()15350 uint32_t PendingBuffersMap::get_num_overall_buffers()
15351 {
15352     uint32_t sum_buffers = 0;
15353     for (auto &req : mPendingBuffersInRequest) {
15354         sum_buffers += req.mPendingBufferList.size();
15355     }
15356     return sum_buffers;
15357 }
15358 
15359 /*===========================================================================
15360  * FUNCTION   : removeBuf
15361  *
15362  * DESCRIPTION: Remove a matching buffer from tracker.
15363  *
15364  * PARAMETERS : @buffer: image buffer for the callback
15365  *
15366  * RETURN     : None
15367  *
15368  *==========================================================================*/
removeBuf(buffer_handle_t * buffer)15369 void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
15370 {
15371     bool buffer_found = false;
15372     for (auto req = mPendingBuffersInRequest.begin();
15373             req != mPendingBuffersInRequest.end(); req++) {
15374         for (auto k = req->mPendingBufferList.begin();
15375                 k != req->mPendingBufferList.end(); k++ ) {
15376             if (k->buffer == buffer) {
15377                 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
15378                         req->frame_number, buffer);
15379                 k = req->mPendingBufferList.erase(k);
15380                 if (req->mPendingBufferList.empty()) {
15381                     // Remove this request from Map
15382                     req = mPendingBuffersInRequest.erase(req);
15383                 }
15384                 buffer_found = true;
15385                 break;
15386             }
15387         }
15388         if (buffer_found) {
15389             break;
15390         }
15391     }
15392     LOGD("mPendingBuffersMap.num_overall_buffers = %d",
15393             get_num_overall_buffers());
15394 }
15395 
15396 /*===========================================================================
15397  * FUNCTION   : getBufErrStatus
15398  *
15399  * DESCRIPTION: get buffer error status
15400  *
15401  * PARAMETERS : @buffer: buffer handle
15402  *
15403  * RETURN     : Error status
15404  *
15405  *==========================================================================*/
getBufErrStatus(buffer_handle_t * buffer)15406 int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
15407 {
15408     for (auto& req : mPendingBuffersInRequest) {
15409         for (auto& k : req.mPendingBufferList) {
15410             if (k.buffer == buffer)
15411                 return k.bufStatus;
15412         }
15413     }
15414     return CAMERA3_BUFFER_STATUS_OK;
15415 }
15416 
15417 /*===========================================================================
15418  * FUNCTION   : setPAAFSupport
15419  *
15420  * DESCRIPTION: Set the preview-assisted auto focus support bit in
15421  *              feature mask according to stream type and filter
15422  *              arrangement
15423  *
15424  * PARAMETERS : @feature_mask: current feature mask, which may be modified
15425  *              @stream_type: stream type
15426  *              @filter_arrangement: filter arrangement
15427  *
15428  * RETURN     : None
15429  *==========================================================================*/
setPAAFSupport(cam_feature_mask_t & feature_mask,cam_stream_type_t stream_type,cam_color_filter_arrangement_t filter_arrangement)15430 void QCamera3HardwareInterface::setPAAFSupport(
15431         cam_feature_mask_t& feature_mask,
15432         cam_stream_type_t stream_type,
15433         cam_color_filter_arrangement_t filter_arrangement)
15434 {
15435     switch (filter_arrangement) {
15436     case CAM_FILTER_ARRANGEMENT_RGGB:
15437     case CAM_FILTER_ARRANGEMENT_GRBG:
15438     case CAM_FILTER_ARRANGEMENT_GBRG:
15439     case CAM_FILTER_ARRANGEMENT_BGGR:
15440         if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
15441                 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
15442                 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
15443             if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
15444                 feature_mask |= CAM_QCOM_FEATURE_PAAF;
15445         }
15446         break;
15447     case CAM_FILTER_ARRANGEMENT_Y:
15448         if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
15449             feature_mask |= CAM_QCOM_FEATURE_PAAF;
15450         }
15451         break;
15452     default:
15453         break;
15454     }
15455     LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
15456             feature_mask, stream_type, filter_arrangement);
15457 
15458 
15459 }
15460 
15461 /*===========================================================================
15462 * FUNCTION   : getSensorMountAngle
15463 *
15464 * DESCRIPTION: Retrieve sensor mount angle
15465 *
15466 * PARAMETERS : None
15467 *
15468 * RETURN     : sensor mount angle in uint32_t
15469 *==========================================================================*/
getSensorMountAngle()15470 uint32_t QCamera3HardwareInterface::getSensorMountAngle()
15471 {
15472     return gCamCapability[mCameraId]->sensor_mount_angle;
15473 }
15474 
15475 /*===========================================================================
15476 * FUNCTION   : getRelatedCalibrationData
15477 *
15478 * DESCRIPTION: Retrieve related system calibration data
15479 *
15480 * PARAMETERS : None
15481 *
15482 * RETURN     : Pointer of related system calibration data
15483 *==========================================================================*/
getRelatedCalibrationData()15484 const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
15485 {
15486     return (const cam_related_system_calibration_data_t *)
15487             &(gCamCapability[mCameraId]->related_cam_calibration);
15488 }
15489 
15490 /*===========================================================================
15491  * FUNCTION   : is60HzZone
15492  *
15493  * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
15494  *
15495  * PARAMETERS : None
15496  *
15497  * RETURN     : True if in 60Hz zone, False otherwise
15498  *==========================================================================*/
is60HzZone()15499 bool QCamera3HardwareInterface::is60HzZone()
15500 {
15501     time_t t = time(NULL);
15502     struct tm lt;
15503 
15504     struct tm* r = localtime_r(&t, &lt);
15505 
15506     if (r == NULL || lt.tm_gmtoff <=  -2*60*60 || lt.tm_gmtoff >= 8*60*60)
15507         return true;
15508     else
15509         return false;
15510 }
15511 
15512 /*===========================================================================
15513  * FUNCTION   : adjustBlackLevelForCFA
15514  *
15515  * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
15516  *              of bayer CFA (Color Filter Array).
15517  *
15518  * PARAMETERS : @input: black level pattern in the order of RGGB
15519  *              @output: black level pattern in the order of CFA
15520  *              @color_arrangement: CFA color arrangement
15521  *
15522  * RETURN     : None
15523  *==========================================================================*/
15524 template<typename T>
adjustBlackLevelForCFA(T input[BLACK_LEVEL_PATTERN_CNT],T output[BLACK_LEVEL_PATTERN_CNT],cam_color_filter_arrangement_t color_arrangement)15525 void QCamera3HardwareInterface::adjustBlackLevelForCFA(
15526         T input[BLACK_LEVEL_PATTERN_CNT],
15527         T output[BLACK_LEVEL_PATTERN_CNT],
15528         cam_color_filter_arrangement_t color_arrangement)
15529 {
15530     switch (color_arrangement) {
15531     case CAM_FILTER_ARRANGEMENT_GRBG:
15532         output[0] = input[1];
15533         output[1] = input[0];
15534         output[2] = input[3];
15535         output[3] = input[2];
15536         break;
15537     case CAM_FILTER_ARRANGEMENT_GBRG:
15538         output[0] = input[2];
15539         output[1] = input[3];
15540         output[2] = input[0];
15541         output[3] = input[1];
15542         break;
15543     case CAM_FILTER_ARRANGEMENT_BGGR:
15544         output[0] = input[3];
15545         output[1] = input[2];
15546         output[2] = input[1];
15547         output[3] = input[0];
15548         break;
15549     case CAM_FILTER_ARRANGEMENT_RGGB:
15550         output[0] = input[0];
15551         output[1] = input[1];
15552         output[2] = input[2];
15553         output[3] = input[3];
15554         break;
15555     default:
15556         LOGE("Invalid color arrangement to derive dynamic blacklevel");
15557         break;
15558     }
15559 }
15560 
/*===========================================================================
 * FUNCTION   : updateHdrPlusResultMetadata
 *
 * DESCRIPTION: Copy JPEG-related and capture-intent entries from the settings
 *              saved for an HDR+ request into the result metadata. For each
 *              tag, the value is published when present in settings and the
 *              tag is erased from the result otherwise, so no stale entry
 *              survives.
 *
 * PARAMETERS : @resultMetadata: result metadata updated in place
 *              @settings: per-request settings buffer saved for this HDR+
 *                         request; must not be null
 *
 * RETURN     : None
 *==========================================================================*/
void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
    CameraMetadata &resultMetadata,
    std::shared_ptr<metadata_buffer_t> settings)
{
    if (settings == nullptr) {
        ALOGE("%s: settings is nullptr.", __FUNCTION__);
        return;
    }

    // GPS coordinates: fixed 3-element double array (lat/long/alt per the
    // Android metadata tag definition).
    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    } else {
        resultMetadata.erase(ANDROID_JPEG_GPS_COORDINATES);
    }

    // GPS processing method is stored as a raw byte string in the settings.
    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
        String8 str((const char *)gps_methods);
        resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    } else {
        resultMetadata.erase(ANDROID_JPEG_GPS_PROCESSING_METHOD);
    }

    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_GPS_TIMESTAMP);
    }

    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
        resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_ORIENTATION);
    }

    // Qualities are stored as uint32 in HAL settings but the framework tag is
    // a uint8, hence the narrowing casts below.
    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
        uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
        resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_QUALITY);
    }

    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
        uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_QUALITY);
    }

    // Thumbnail size converts from cam_dimension_t to the framework's
    // two-element int32 array.
    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    } else {
        resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_SIZE);
    }

    IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
        uint8_t fwk_intent = intent[0];
        resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
    } else {
        resultMetadata.erase(ANDROID_CONTROL_CAPTURE_INTENT);
    }
}
15625 
isRequestHdrPlusCompatible(const camera3_capture_request_t & request,const CameraMetadata & metadata)15626 bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
15627         const camera3_capture_request_t &request, const CameraMetadata &metadata) {
15628     if (metadata.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
15629             metadata.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 1) {
15630         ALOGV("%s: NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS is 1", __FUNCTION__);
15631         return false;
15632     }
15633 
15634     if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
15635          metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
15636             ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
15637         ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
15638                 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
15639         return false;
15640     }
15641 
15642     if (!metadata.exists(ANDROID_EDGE_MODE) ||
15643             metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
15644         ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
15645         return false;
15646     }
15647 
15648     if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
15649             metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
15650                     ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
15651         ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
15652         return false;
15653     }
15654 
15655     if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
15656             (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
15657              metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
15658                     ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
15659         ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
15660         return false;
15661     }
15662 
15663     if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
15664             metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
15665         ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
15666         return false;
15667     }
15668 
15669     if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
15670             metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
15671                     ANDROID_CONTROL_EFFECT_MODE_OFF) {
15672         ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE_OFF is not OFF.", __FUNCTION__);
15673         return false;
15674     }
15675 
15676     if (!metadata.exists(ANDROID_CONTROL_MODE) ||
15677             (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
15678              metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
15679                     ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
15680         ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
15681         return false;
15682     }
15683 
15684     // TODO (b/32585046): support non-ZSL.
15685     if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
15686          metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
15687         ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
15688         return false;
15689     }
15690 
15691     // TODO (b/32586081): support flash.
15692     if (!metadata.exists(ANDROID_FLASH_MODE) ||
15693          metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
15694         ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
15695         return false;
15696     }
15697 
15698     if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
15699          metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
15700         ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
15701         return false;
15702     }
15703 
15704     switch (request.output_buffers[0].stream->format) {
15705         case HAL_PIXEL_FORMAT_BLOB:
15706         case HAL_PIXEL_FORMAT_YCbCr_420_888:
15707         case HAL_PIXEL_FORMAT_Y8:
15708         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15709             break;
15710         default:
15711             ALOGV("%s: Not an HDR+ request: Only Jpeg and YUV output is supported.", __FUNCTION__);
15712             for (uint32_t i = 0; i < request.num_output_buffers; i++) {
15713                 ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
15714                         request.output_buffers[0].stream->width,
15715                         request.output_buffers[0].stream->height,
15716                         request.output_buffers[0].stream->format);
15717             }
15718             return false;
15719     }
15720 
15721     return true;
15722 }
15723 
abortPendingHdrplusRequest(HdrPlusPendingRequest * hdrPlusRequest)15724 void QCamera3HardwareInterface::abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest) {
15725     if (hdrPlusRequest == nullptr) return;
15726 
15727     for (auto & outputBufferIter : hdrPlusRequest->outputBuffers) {
15728         // Find the stream for this buffer.
15729         for (auto streamInfo : mStreamInfo) {
15730             if (streamInfo->id == outputBufferIter.first) {
15731                 if (streamInfo->channel == mPictureChannel) {
15732                     // For picture channel, this buffer is internally allocated so return this
15733                     // buffer to picture channel.
15734                     mPictureChannel->returnYuvBuffer(outputBufferIter.second.get());
15735                 } else {
15736                     // Unregister this buffer for other channels.
15737                     streamInfo->channel->unregisterBuffer(outputBufferIter.second.get());
15738                 }
15739                 break;
15740             }
15741         }
15742     }
15743 
15744     hdrPlusRequest->outputBuffers.clear();
15745     hdrPlusRequest->frameworkOutputBuffers.clear();
15746 }
15747 
15748 /*===========================================================================
15749  * FUNCTION   : isEISCropInSnapshotNeeded
15750  *
15751  * DESCRIPTION: In case EIS is active, check whether additional crop is needed
15752  *              to avoid FOV jumps in snapshot streams.
15753  *
15754  * PARAMETERS : @metadata: Current request settings.
15755  *
15756  * RETURN     : True in case EIS crop is needed, False otherwise.
15757  *==========================================================================*/
isEISCropInSnapshotNeeded(const CameraMetadata & metadata) const15758 bool QCamera3HardwareInterface::isEISCropInSnapshotNeeded(const CameraMetadata &metadata) const
15759 {
15760     if (metadata.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
15761         uint8_t vstabMode =
15762             metadata.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
15763         if (vstabMode == ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON) {
15764             if ((mLastEISCropInfo.delta_x != 0) || (mLastEISCropInfo.delta_y != 0) ||
15765                     (mLastEISCropInfo.delta_width != 0) || (mLastEISCropInfo.delta_height != 0)) {
15766                 return true;
15767             }
15768         }
15769     }
15770 
15771     return false;
15772 }
15773 
15774 /*===========================================================================
15775  * FUNCTION   : isCropValid
15776  *
15777  * DESCRIPTION: Crop sanity checks.
15778  *
15779  * PARAMETERS : @startX: Horizontal crop offset.
15780  *              @startY: Vertical crop offset.
15781  *              @width: Crop width.
15782  *              @height: Crop height.
15783  *              @maxWidth: Horizontal maximum size.
15784  *              @maxHeight: Vertical maximum size.
15785  *
15786  * RETURN     : True in case crop is valid, False otherwise.
15787  *==========================================================================*/
isCropValid(int32_t startX,int32_t startY,int32_t width,int32_t height,int32_t maxWidth,int32_t maxHeight) const15788 bool QCamera3HardwareInterface::isCropValid(int32_t startX, int32_t startY, int32_t width,
15789         int32_t height, int32_t maxWidth, int32_t maxHeight) const
15790 {
15791     if ((startX < 0) || (startY < 0) || (startX >= maxWidth) || (startY >= maxHeight)) {
15792         LOGE("Crop offset is invalid: %dx%d", startX, startY);
15793         return false;
15794     }
15795 
15796     if ((width < 0) || (height < 0) || (width >= maxWidth) || (height >= maxHeight)) {
15797         LOGE("Crop dimensions are invalid: %dx%d", width, height);
15798         return false;
15799     }
15800 
15801     if (((startX + width) > maxWidth)  || ((startY + height) > maxHeight)) {
15802         LOGE("Crop is out of bounds: %dx%d max %dx%d", startX + width, startY + height, maxWidth,
15803                 maxHeight);
15804         return false;
15805     }
15806 
15807     return true;
15808 }
15809 
/*===========================================================================
 * FUNCTION   : trySubmittingHdrPlusRequestLocked
 *
 * DESCRIPTION: Attempt to submit a framework capture request to the HDR+
 *              client. Reserves an output buffer per requested stream,
 *              optionally compensates the scaler crop region for the last EIS
 *              crop, and submits the request. On any failure the reserved
 *              buffers are released via abortPendingHdrplusRequest().
 *
 * PARAMETERS : @hdrPlusRequest: filled with the reserved HAL/framework
 *                               buffers on success
 *              @request: framework capture request
 *              @metadata: request settings
 *
 * RETURN     : True if the request was submitted as an HDR+ request
 *==========================================================================*/
bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
        HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
        const CameraMetadata &metadata)
{
    if (hdrPlusRequest == nullptr) return false;
    if (!isRequestHdrPlusCompatible(request, metadata)) return false;

    status_t res = OK;
    pbcamera::CaptureRequest pbRequest;
    pbRequest.id = request.frame_number;
    // Iterate through all requested output buffers and add them to an HDR+ request.
    for (uint32_t i = 0; i < request.num_output_buffers; i++) {
        // Find the index of the stream in mStreamInfo.
        uint32_t pbStreamId = 0;
        bool found = false;
        for (auto streamInfo : mStreamInfo) {
            if (streamInfo->stream == request.output_buffers[i].stream) {
                pbStreamId = streamInfo->id;
                found = true;
                break;
            }
        }

        if (!found) {
            ALOGE("%s: requested stream was not configured.", __FUNCTION__);
            abortPendingHdrplusRequest(hdrPlusRequest);
            return false;
        }
        auto outBuffer = std::make_shared<mm_camera_buf_def_t>();
        switch (request.output_buffers[i].stream->format) {
            case HAL_PIXEL_FORMAT_BLOB:
            {
                // For jpeg output, get a YUV buffer from pic channel.
                QCamera3PicChannel *picChannel =
                        (QCamera3PicChannel*)request.output_buffers[i].stream->priv;
                res = picChannel->getYuvBufferForRequest(outBuffer.get(), request.frame_number);
                if (res != OK) {
                    ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
                            __FUNCTION__, strerror(-res), res);
                    abortPendingHdrplusRequest(hdrPlusRequest);
                    return false;
                }
                break;
            }
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
            case HAL_PIXEL_FORMAT_Y8:
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
            {
                // For YUV output, register the buffer and get the buffer def from the channel.
                QCamera3ProcessingChannel *channel =
                        (QCamera3ProcessingChannel*)request.output_buffers[i].stream->priv;
                res = channel->registerBufferAndGetBufDef(request.output_buffers[i].buffer,
                        outBuffer.get());
                if (res != OK) {
                    ALOGE("%s: Getting the buffer def failed: %s (%d)", __FUNCTION__,
                            strerror(-res), res);
                    abortPendingHdrplusRequest(hdrPlusRequest);
                    return false;
                }
                break;
            }
            default:
                // isRequestHdrPlusCompatible() should have filtered out other
                // formats already.
                abortPendingHdrplusRequest(hdrPlusRequest);
                return false;
        }

        pbcamera::StreamBuffer buffer;
        buffer.streamId = pbStreamId;
        buffer.dmaBufFd = outBuffer->fd;
        // When there is no dma-buf fd (fd == -1), fall back to the CPU pointer.
        buffer.data = outBuffer->fd == -1 ? outBuffer->buffer : nullptr;
        buffer.dataSize = outBuffer->frame_len;

        pbRequest.outputBuffers.push_back(buffer);

        // Track both the HAL-side buffer and the framework buffer per stream
        // so result/abort paths can return each to its owner.
        hdrPlusRequest->outputBuffers.emplace(pbStreamId, outBuffer);
        hdrPlusRequest->frameworkOutputBuffers.emplace(pbStreamId, request.output_buffers[i]);
    }

    if (isEISCropInSnapshotNeeded(metadata)) {
        // Shift/shrink the crop region by the last EIS crop deltas so the
        // snapshot FOV matches the EIS-cropped preview FOV. Default to the
        // full active array when the request carries no crop region.
        int32_t scalerRegion[4] = {0, 0, gCamCapability[mCameraId]->active_array_size.width,
            gCamCapability[mCameraId]->active_array_size.height};
        if (metadata.exists(ANDROID_SCALER_CROP_REGION)) {
            auto currentScalerRegion = metadata.find(ANDROID_SCALER_CROP_REGION).data.i32;
            scalerRegion[0] = currentScalerRegion[0] + mLastEISCropInfo.delta_x;
            scalerRegion[1] = currentScalerRegion[1] + mLastEISCropInfo.delta_y;
            scalerRegion[2] = currentScalerRegion[2] - mLastEISCropInfo.delta_width;
            scalerRegion[3] = currentScalerRegion[3] - mLastEISCropInfo.delta_height;
        } else {
            scalerRegion[0] += mLastEISCropInfo.delta_x;
            scalerRegion[1] += mLastEISCropInfo.delta_y;
            scalerRegion[2] -= mLastEISCropInfo.delta_width;
            scalerRegion[3] -= mLastEISCropInfo.delta_height;
        }

        // Capture requests should not be modified.
        CameraMetadata updatedMetadata(metadata);
        if (isCropValid(scalerRegion[0], scalerRegion[1], scalerRegion[2], scalerRegion[3],
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height)) {
            updatedMetadata.update(ANDROID_SCALER_CROP_REGION, scalerRegion, 4);
        } else {
            // Keep the original crop if the compensated one fails validation.
            LOGE("Invalid EIS compensated crop region");
        }

        res = gHdrPlusClient->submitCaptureRequest(&pbRequest, updatedMetadata);
    } else {
        res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
    }

    if (res != OK) {
        ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
                strerror(-res), res);
        abortPendingHdrplusRequest(hdrPlusRequest);
        return false;
    }

    return true;
}
15928 
openHdrPlusClientAsyncLocked()15929 status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
15930 {
15931     if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
15932         return OK;
15933     }
15934 
15935     status_t res = gEaselManagerClient->openHdrPlusClientAsync(mQCamera3HdrPlusListenerThread.get());
15936     if (res != OK) {
15937         ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
15938                 strerror(-res), res);
15939         return res;
15940     }
15941     gHdrPlusClientOpening = true;
15942 
15943     return OK;
15944 }
15945 
enableHdrPlusModeLocked()15946 status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
15947 {
15948     status_t res;
15949 
15950     if (mHdrPlusModeEnabled) {
15951         return OK;
15952     }
15953 
15954     // Check if gHdrPlusClient is opened or being opened.
15955     if (gHdrPlusClient == nullptr) {
15956         if (gHdrPlusClientOpening) {
15957             // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
15958             return OK;
15959         }
15960 
15961         res = openHdrPlusClientAsyncLocked();
15962         if (res != OK) {
15963             ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
15964                     strerror(-res), res);
15965             return res;
15966         }
15967 
15968         // When opening HDR+ client completes, HDR+ mode will be enabled.
15969         return OK;
15970 
15971     }
15972 
15973     // Configure stream for HDR+.
15974     res = configureHdrPlusStreamsLocked();
15975     if (res != OK) {
15976         LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
15977         return res;
15978     }
15979 
15980     // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
15981     res = gHdrPlusClient->setZslHdrPlusMode(true);
15982     if (res != OK) {
15983         LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
15984         return res;
15985     }
15986 
15987     mHdrPlusModeEnabled = true;
15988     ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
15989 
15990     return OK;
15991 }
15992 
finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> & lock)15993 void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
15994 {
15995     if (gHdrPlusClientOpening) {
15996         gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
15997     }
15998 }
15999 
disableHdrPlusModeLocked()16000 void QCamera3HardwareInterface::disableHdrPlusModeLocked()
16001 {
16002     // Disable HDR+ mode.
16003     if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
16004         status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
16005         if (res != OK) {
16006             ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
16007         }
16008 
16009         // Close HDR+ client so Easel can enter low power mode.
16010         gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
16011         gHdrPlusClient = nullptr;
16012     }
16013 
16014     mHdrPlusModeEnabled = false;
16015     ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
16016 }
16017 
isSessionHdrPlusModeCompatible()16018 bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
16019 {
16020     // Check that at least one YUV or one JPEG output is configured.
16021     // TODO: Support RAW (b/36690506)
16022     for (auto streamInfo : mStreamInfo) {
16023         if (streamInfo != nullptr && streamInfo->stream != nullptr) {
16024             if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
16025                     (streamInfo->stream->format == HAL_PIXEL_FORMAT_BLOB ||
16026                      streamInfo->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
16027                      streamInfo->stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
16028                 return true;
16029             }
16030         }
16031     }
16032 
16033     return false;
16034 }
16035 
configureHdrPlusStreamsLocked()16036 status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
16037 {
16038     pbcamera::InputConfiguration inputConfig;
16039     std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
16040     status_t res = OK;
16041 
16042     // Sensor MIPI will send data to Easel.
16043     inputConfig.isSensorInput = true;
16044     inputConfig.sensorMode.cameraId = mCameraId;
16045     inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
16046     inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
16047     inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
16048     inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
16049     inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
16050     inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
16051     inputConfig.sensorMode.timestampCropOffsetNs = mSensorModeInfo.timestamp_crop_offset;
16052 
16053     if (mSensorModeInfo.num_raw_bits != 10) {
16054         ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
16055                 mSensorModeInfo.num_raw_bits);
16056         return BAD_VALUE;
16057     }
16058 
16059     inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
16060 
16061     // Iterate through configured output streams in HAL and configure those streams in HDR+
16062     // service.
16063     for (auto streamInfo : mStreamInfo) {
16064         pbcamera::StreamConfiguration outputConfig;
16065         if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT) {
16066             switch (streamInfo->stream->format) {
16067                 case HAL_PIXEL_FORMAT_BLOB:
16068                 case HAL_PIXEL_FORMAT_YCbCr_420_888:
16069                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
16070                     res = fillPbStreamConfig(&outputConfig, streamInfo->id,
16071                             streamInfo->channel, /*stream index*/0);
16072                     if (res != OK) {
16073                         LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
16074                             __FUNCTION__, strerror(-res), res);
16075 
16076                         return res;
16077                     }
16078 
16079                     outputStreamConfigs.push_back(outputConfig);
16080                     break;
16081                 default:
16082                     // TODO: handle RAW16 outputs if mRawChannel was created. (b/36690506)
16083                     break;
16084             }
16085         }
16086     }
16087 
16088     res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
16089     if (res != OK) {
16090         LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
16091             strerror(-res), res);
16092         return res;
16093     }
16094 
16095     return OK;
16096 }
16097 
// Handle a fatal Easel error: inform the HDR+ client (if any), mark the
// HAL state as ERROR, and run the common camera-device error path. This
// runs asynchronously on the future launched by handleEaselFatalErrorAsync().
void QCamera3HardwareInterface::handleEaselFatalError()
{
    {
        // Scoped so gHdrPlusClientLock is dropped before taking mMutex below.
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // "nofity" [sic] is the spelling of the client API method.
            gHdrPlusClient->nofityEaselFatalError();
        }
    }

    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    // Notify the framework and tear down channels immediately.
    handleCameraDeviceError(/*stopChannelImmediately*/true);
}
16113 
cleanupEaselErrorFuture()16114 void QCamera3HardwareInterface::cleanupEaselErrorFuture()
16115 {
16116     {
16117         std::lock_guard<std::mutex> lock(mEaselErrorFutureLock);
16118         if (!mEaselErrorFuture.valid()) {
16119             // If there is no Easel error, construct a dummy future to wait for.
16120             mEaselErrorFuture = std::async([]() { return; });
16121         }
16122     }
16123 
16124     mEaselErrorFuture.wait();
16125 }
16126 
handleEaselFatalErrorAsync()16127 void QCamera3HardwareInterface::handleEaselFatalErrorAsync()
16128 {
16129     std::lock_guard<std::mutex> lock(mEaselErrorFutureLock);
16130 
16131     if (mEaselErrorFuture.valid()) {
16132         // The error future has been invoked.
16133         return;
16134     }
16135 
16136     // Launch a future to handle the fatal error.
16137     mEaselErrorFuture = std::async(std::launch::async,
16138             &QCamera3HardwareInterface::handleEaselFatalError, this);
16139 }
16140 
onEaselFatalError(std::string errMsg)16141 void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
16142 {
16143     ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
16144     handleEaselFatalErrorAsync();
16145 }
16146 
// HdrPlusClientListener callback: an asynchronous HDR+ client open finished
// successfully. Takes ownership of the client, publishes it under
// gHdrPlusClientLock, wakes waiters, pushes the static metadata, enables
// HDR+ mode, and records the Easel firmware version.
void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
{
    int rc = NO_ERROR;

    if (client == nullptr) {
        ALOGE("%s: Opened client is null.", __FUNCTION__);
        return;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
    ALOGI("%s: HDR+ client opened.", __FUNCTION__);

    std::unique_lock<std::mutex> l(gHdrPlusClientLock);
    if (!gHdrPlusClientOpening) {
        // HDR+ was disabled while the open was in flight; drop the client
        // (its unique_ptr destructor releases it on return).
        ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
        return;
    }

    // Publish the client and wake any thread blocked in
    // finishHdrPlusClientOpeningLocked().
    gHdrPlusClient = std::move(client);
    gHdrPlusClientOpening = false;
    gHdrPlusClientOpenCond.notify_one();

    // Set static metadata.
    status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
    if (res != OK) {
        LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
            __FUNCTION__, strerror(-res), res);
        // Without static metadata the client is unusable; close it again.
        gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
        return;
    }

    // Enable HDR+ mode.
    res = enableHdrPlusModeLocked();
    if (res != OK) {
        LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
    }

    // Get Easel firmware version
    if (EaselManagerClientOpened) {
        rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
        if (rc != OK) {
            ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
        } else {
            // Mark the cached firmware version as fresh.
            mEaselFwUpdated = true;
        }
    }
}
16195 
onOpenFailed(status_t err)16196 void QCamera3HardwareInterface::onOpenFailed(status_t err)
16197 {
16198     ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
16199     std::unique_lock<std::mutex> l(gHdrPlusClientLock);
16200     gHdrPlusClientOpening = false;
16201     gHdrPlusClientOpenCond.notify_one();
16202 }
16203 
onFatalError()16204 void QCamera3HardwareInterface::onFatalError()
16205 {
16206     ALOGE("%s: HDR+ client encountered a fatal error.", __FUNCTION__);
16207     handleEaselFatalErrorAsync();
16208 }
16209 
onShutter(uint32_t requestId,int64_t apSensorTimestampNs)16210 void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
16211 {
16212     ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
16213             __LINE__, requestId, apSensorTimestampNs);
16214 
16215     mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
16216 }
16217 
onNextCaptureReady(uint32_t requestId)16218 void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
16219 {
16220     pthread_mutex_lock(&mMutex);
16221 
16222     // Find the pending request for this result metadata.
16223     auto requestIter = mPendingRequestsList.begin();
16224     while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
16225         requestIter++;
16226     }
16227 
16228     if (requestIter == mPendingRequestsList.end()) {
16229         ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
16230         pthread_mutex_unlock(&mMutex);
16231         return;
16232     }
16233 
16234     requestIter->partial_result_cnt++;
16235 
16236     CameraMetadata metadata;
16237     uint8_t ready = true;
16238     metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
16239 
16240     // Send it to framework.
16241     camera3_capture_result_t result = {};
16242 
16243     result.result = metadata.getAndLock();
16244     // Populate metadata result
16245     result.frame_number = requestId;
16246     result.num_output_buffers = 0;
16247     result.output_buffers = NULL;
16248     result.partial_result = requestIter->partial_result_cnt;
16249 
16250     orchestrateResult(&result);
16251     metadata.unlock(result.result);
16252 
16253     pthread_mutex_unlock(&mMutex);
16254 }
16255 
// HDR+ callback delivering a postview image for a request. Optionally dumps
// the image to a .ppm file (persist.camera.hdrplus.dump_postview), then
// forwards the postview configuration and pixel data to the framework as a
// partial metadata result using the POSTVIEW_CONFIG / POSTVIEW_DATA vendor
// tags.
//   requestId - frame number of the HDR+ request.
//   postview  - postview pixel data.
//   width/height/stride/format - geometry and pixel format of the image.
void QCamera3HardwareInterface::onPostview(uint32_t requestId,
        std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
        uint32_t stride, int32_t format)
{
    if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
        ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
                __LINE__, width, height, requestId);
        char buf[FILENAME_MAX] = {};
        snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
                requestId, width, height);

        // Describe the image geometry for the ppm writer.
        pbcamera::StreamConfiguration config = {};
        config.image.width = width;
        config.image.height = height;
        config.image.format = format;

        pbcamera::PlaneConfiguration plane = {};
        plane.stride = stride;
        plane.scanline = height;

        config.image.planes.push_back(plane);

        // Wrap the postview bytes in a StreamBuffer (no dma-buf backing).
        pbcamera::StreamBuffer buffer = {};
        buffer.streamId = 0;
        buffer.dmaBufFd = -1;
        buffer.data = postview->data();
        buffer.dataSize = postview->size();

        hdrplus_client_utils::writePpm(buf, config, buffer);
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending request for this result metadata.
    auto requestIter = mPendingRequestsList.begin();
    while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
        requestIter++;
    }

    if (requestIter == mPendingRequestsList.end()) {
        ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
        pthread_mutex_unlock(&mMutex);
        return;
    }

    requestIter->partial_result_cnt++;

    // Pack the postview configuration and pixel data into vendor tags.
    CameraMetadata metadata;
    int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
            static_cast<int32_t>(stride)};
    metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
    metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());

    // Send it to framework.
    camera3_capture_result_t result = {};

    result.result = metadata.getAndLock();
    // Populate metadata result
    result.frame_number = requestId;
    result.num_output_buffers = 0;
    result.output_buffers = NULL;
    result.partial_result = requestIter->partial_result_cnt;

    orchestrateResult(&result);
    metadata.unlock(result.result);

    pthread_mutex_unlock(&mMutex);
}
16324 
onCaptureResult(pbcamera::CaptureResult * result,const camera_metadata_t & resultMetadata)16325 void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
16326         const camera_metadata_t &resultMetadata)
16327 {
16328     if (result == nullptr) {
16329         ALOGE("%s: result is nullptr.", __FUNCTION__);
16330         return;
16331     }
16332 
16333     // Find the pending HDR+ request.
16334     HdrPlusPendingRequest pendingRequest;
16335     {
16336         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
16337         auto req = mHdrPlusPendingRequests.find(result->requestId);
16338         pendingRequest = req->second;
16339     }
16340 
16341     // Update the result metadata with the settings of the HDR+ still capture request because
16342     // the result metadata belongs to a ZSL buffer.
16343     CameraMetadata metadata;
16344     metadata = &resultMetadata;
16345     updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
16346     camera_metadata_t* updatedResultMetadata = metadata.release();
16347 
16348     uint32_t halSnapshotStreamId = 0;
16349     if (mPictureChannel != nullptr) {
16350         halSnapshotStreamId = mPictureChannel->getStreamID(mPictureChannel->getStreamTypeMask());
16351     }
16352 
16353     auto halMetadata = std::make_shared<metadata_buffer_t>();
16354     clear_metadata_buffer(halMetadata.get());
16355 
16356     // Convert updated result metadata to HAL metadata.
16357     status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
16358             halSnapshotStreamId, /*minFrameDuration*/0);
16359     if (res != 0) {
16360         ALOGE("%s: Translating metadata failed: %s (%d)", __FUNCTION__, strerror(-res), res);
16361     }
16362 
16363     for (auto &outputBuffer : result->outputBuffers) {
16364         uint32_t streamId = outputBuffer.streamId;
16365 
16366         // Find the framework output buffer in the pending request.
16367         auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
16368         if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
16369             ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
16370                     streamId);
16371             continue;
16372         }
16373 
16374         camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
16375 
16376         // Find the channel for the output buffer.
16377         QCamera3ProcessingChannel *channel =
16378                 (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
16379 
16380         // Find the output buffer def.
16381         auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
16382         if (outputBufferIter == pendingRequest.outputBuffers.end()) {
16383             ALOGE("%s: Cannot find output buffer", __FUNCTION__);
16384             continue;
16385         }
16386 
16387         std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
16388 
16389         // Check whether to dump the buffer.
16390         if (frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
16391                 frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
16392             // If the stream format is YUV or jpeg, check if dumping HDR+ YUV output is enabled.
16393             char prop[PROPERTY_VALUE_MAX];
16394             property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
16395             bool dumpYuvOutput = atoi(prop);
16396 
16397             if (dumpYuvOutput) {
16398                 // Dump yuv buffer to a ppm file.
16399                 pbcamera::StreamConfiguration outputConfig;
16400                 status_t rc = fillPbStreamConfig(&outputConfig, streamId,
16401                         channel, /*stream index*/0);
16402                 if (rc == OK) {
16403                     char buf[FILENAME_MAX] = {};
16404                     snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
16405                             result->requestId, streamId,
16406                             outputConfig.image.width, outputConfig.image.height);
16407 
16408                     hdrplus_client_utils::writePpm(buf, outputConfig, outputBuffer);
16409                 } else {
16410                     LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: "
16411                             "%s (%d).", __FUNCTION__, strerror(-rc), rc);
16412                 }
16413             }
16414         }
16415 
16416         if (channel == mPictureChannel) {
16417             // Return the buffer to pic channel for encoding.
16418             mPictureChannel->returnYuvBufferAndEncode(outputBufferDef.get(),
16419                     frameworkOutputBuffer->buffer, result->requestId,
16420                     halMetadata);
16421         } else {
16422             // Return the buffer to camera framework.
16423             pthread_mutex_lock(&mMutex);
16424             handleBufferWithLock(frameworkOutputBuffer, result->requestId);
16425             channel->unregisterBuffer(outputBufferDef.get());
16426             pthread_mutex_unlock(&mMutex);
16427         }
16428     }
16429 
16430     // Send HDR+ metadata to framework.
16431     {
16432         pthread_mutex_lock(&mMutex);
16433 
16434         // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
16435         handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
16436         pthread_mutex_unlock(&mMutex);
16437     }
16438 
16439     // Remove the HDR+ pending request.
16440     {
16441         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
16442         auto req = mHdrPlusPendingRequests.find(result->requestId);
16443         mHdrPlusPendingRequests.erase(req);
16444     }
16445 }
16446 
// HDR+ client callback invoked when a capture request failed on Easel.
// Returns the request's output buffers to their channels, drops the pending
// HDR+ request, and completes the request toward the framework with a
// CAMERA3_MSG_ERROR_REQUEST notification plus error-status buffers so it
// does not hang.
//   failedResult - result describing the failed request; must not be null.
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Find the pending HDR+ request.
    HdrPlusPendingRequest pendingRequest;
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (req == mHdrPlusPendingRequests.end()) {
            ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, failedResult->requestId);
            return;
        }
        pendingRequest = req->second;
    }

    // Return every output buffer of the failed request to its channel.
    for (auto &outputBuffer : failedResult->outputBuffers) {
        uint32_t streamId = outputBuffer.streamId;

        // Find the channel
        // Find the framework output buffer in the pending request.
        auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
        if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
            ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
                    streamId);
            continue;
        }

        camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;

        // Find the channel for the output buffer.
        QCamera3ProcessingChannel *channel =
                (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;

        // Find the output buffer def.
        auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
        if (outputBufferIter == pendingRequest.outputBuffers.end()) {
            ALOGE("%s: Cannot find output buffer", __FUNCTION__);
            continue;
        }

        std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;

        if (channel == mPictureChannel) {
            // Return the buffer to pic channel.
            mPictureChannel->returnYuvBuffer(outputBufferDef.get());
        } else {
            channel->unregisterBuffer(outputBufferDef.get());
        }
    }

    // Remove the HDR+ pending request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
        mHdrPlusPendingRequests.erase(req);
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out request errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            // Send out request error event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
            mOutputBufferDispatcher.markBufferReady(pendingBuffers->frame_number, streamBuffer);
        }

        // No shutter will arrive for a failed request; drop its entry.
        mShutterDispatcher.clear(pendingBuffers->frame_number);



        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}
16564 
/**
 * Read sensor calibration (intrinsics, radial distortion, and pose) from
 * the persisted factory calibration XML file.
 *
 * The file must contain a "rig" element with a "camera" whose
 * "camera_model" (index 0, type "calibu_fu_fv_u0_v0_k1_k2_k3") supplies
 * intrinsic parameters, and an "extrinsic_calibration" (frame_B_id 0)
 * supplying the pose quaternion (A_q_B) and translation (A_p_B).
 *
 * @param activeArrayWidth width of the active pixel array; intrinsics are
 *                         scaled from the calibration model width to this.
 * @param poseRotation     out: pose quaternion (4 elements).
 * @param poseTranslation  out: pose translation (3 elements).
 * @param cameraIntrinsics out: [f_x, f_y, c_x, c_y, s] with s = 0.
 * @param radialDistortion out: [k_1, k_2, k_3, k_4, k_5] with k_4 = k_5 = 0.
 * @return true on success, false if the file is missing or malformed.
 */
bool QCamera3HardwareInterface::readSensorCalibration(
        int activeArrayWidth,
        float poseRotation[4], float poseTranslation[3],
        float cameraIntrinsics[5], float radialDistortion[5]) {

    const char* calibrationPath = "/persist/sensors/calibration/calibration.xml";

    using namespace tinyxml2;

    XMLDocument calibrationXml;
    XMLError err = calibrationXml.LoadFile(calibrationPath);
    if (err != XML_SUCCESS) {
        ALOGE("Unable to load calibration file '%s'. Error: %s",
                calibrationPath, XMLDocument::ErrorIDToName(err));
        return false;
    }
    XMLElement *rig = calibrationXml.FirstChildElement("rig");
    if (rig == nullptr) {
        ALOGE("No 'rig' in calibration file");
        return false;
    }
    // Walk the cameras looking for the camera_model with index 0.
    XMLElement *cam = rig->FirstChildElement("camera");
    XMLElement *camModel = nullptr;
    while (cam != nullptr) {
        camModel = cam->FirstChildElement("camera_model");
        if (camModel == nullptr) {
            ALOGE("No 'camera_model' in calibration file");
            return false;
        }
        int modelIndex = camModel->IntAttribute("index", -1);
        // Model index "0" has the calibration we need
        if (modelIndex == 0) {
            break;
        }
        cam = cam->NextSiblingElement("camera");
    }
    if (cam == nullptr) {
        ALOGE("No 'camera' in calibration file");
        return false;
    }
    // Only the calibu fu/fv/u0/v0/k1/k2/k3 model is understood.
    const char *modelType = camModel->Attribute("type");
    if (modelType == nullptr || strcmp(modelType,"calibu_fu_fv_u0_v0_k1_k2_k3")) {
        ALOGE("Camera model is unknown type %s",
                modelType ? modelType : "NULL");
        return false;
    }
    // Model dimensions are needed to scale intrinsics to the active array.
    XMLElement *modelWidth = camModel->FirstChildElement("width");
    if (modelWidth == nullptr || modelWidth->GetText() == nullptr) {
        ALOGE("No camera model width in calibration file");
        return false;
    }
    int width = atoi(modelWidth->GetText());
    XMLElement *modelHeight = camModel->FirstChildElement("height");
    if (modelHeight == nullptr || modelHeight->GetText() == nullptr) {
        ALOGE("No camera model height in calibration file");
        return false;
    }
    int height = atoi(modelHeight->GetText());
    if (width <= 0 || height <= 0) {
        ALOGE("Bad model width or height in calibration file: %d x %d", width, height);
        return false;
    }
    ALOGI("Width: %d, Height: %d", width, height);

    XMLElement *modelParams = camModel->FirstChildElement("params");
    if (modelParams == nullptr) {
        ALOGE("No camera model params in calibration file");
        return false;
    }
    const char* paramText = modelParams->GetText();
    if (paramText == nullptr) {
        ALOGE("No parameters in params element in calibration file");
        return false;
    }
    ALOGI("Parameters: %s", paramText);

    // Parameter string is of the form "[ float; float; float ...]"
    // laid out as [fu, fv, u0, v0, k1, k2, k3] (see mapping below).
    float params[7];
    bool success = parseStringArray(paramText, params, 7);
    if (!success) {
        ALOGE("Malformed camera parameter string in calibration file");
        return false;
    }

    // Find the extrinsic calibration entry with frame_B_id 0.
    XMLElement *extCalib = rig->FirstChildElement("extrinsic_calibration");
    while (extCalib != nullptr) {
        int id = extCalib->IntAttribute("frame_B_id", -1);
        if (id == 0) {
            break;
        }
        extCalib = extCalib->NextSiblingElement("extrinsic_calibration");
    }
    if (extCalib == nullptr) {
        ALOGE("No 'extrinsic_calibration' in calibration file");
        return false;
    }

    XMLElement *q = extCalib->FirstChildElement("A_q_B");
    if (q == nullptr || q->GetText() == nullptr) {
        ALOGE("No extrinsic quarternion in calibration file");
        return false;
    }
    float rotation[4];
    success = parseStringArray(q->GetText(), rotation, 4);
    if (!success) {
        ALOGE("Malformed extrinsic quarternion string in calibration file");
        return false;
    }

    XMLElement *p = extCalib->FirstChildElement("A_p_B");
    if (p == nullptr || p->GetText() == nullptr) {
        ALOGE("No extrinsic translation in calibration file");
        return false;
    }
    float position[3];
    success = parseStringArray(p->GetText(), position, 3);
    if (!success) {
        ALOGE("Malformed extrinsic position string in calibration file");
        return false;
    }

    // Map from width x height to active array
    float scaleFactor = static_cast<float>(activeArrayWidth) / width;

    cameraIntrinsics[0] = params[0] * scaleFactor; // fu -> f_x
    cameraIntrinsics[1] = params[1] * scaleFactor; // fv -> f_y
    cameraIntrinsics[2] = params[2] * scaleFactor; // u0 -> c_x
    cameraIntrinsics[3] = params[3] * scaleFactor; // v0 -> c_y
    cameraIntrinsics[4] = 0; // s = 0

    radialDistortion[0] = params[4]; // k1 -> k_1
    radialDistortion[1] = params[5]; // k2 -> k_2
    radialDistortion[2] = params[6]; // k3 -> k_3
    radialDistortion[3] = 0; // k_4 = 0
    radialDistortion[4] = 0; // k_5 = 0

    for (int i = 0; i < 4; i++) {
        poseRotation[i] = rotation[i];
    }
    for (int i = 0; i < 3; i++) {
        poseTranslation[i] = position[i];
    }

    ALOGI("Intrinsics: %f, %f, %f, %f, %f", cameraIntrinsics[0],
            cameraIntrinsics[1], cameraIntrinsics[2],
            cameraIntrinsics[3], cameraIntrinsics[4]);
    ALOGI("Distortion: %f, %f, %f, %f, %f",
            radialDistortion[0], radialDistortion[1], radialDistortion[2], radialDistortion[3],
            radialDistortion[4]);
    ALOGI("Pose rotation: %f, %f, %f, %f",
            poseRotation[0], poseRotation[1], poseRotation[2], poseRotation[3]);
    ALOGI("Pose translation: %f, %f, %f",
            poseTranslation[0], poseTranslation[1], poseTranslation[2]);

    return true;
}
16721 
parseStringArray(const char * str,float * dest,int count)16722 bool QCamera3HardwareInterface::parseStringArray(const char *str, float *dest, int count) {
16723     size_t idx = 0;
16724     size_t len = strlen(str);
16725     for (; idx < len; idx++) {
16726         if (str[idx] == '[') break;
16727     }
16728     const char *startParam = str + idx + 1;
16729     if (startParam >= str + len) {
16730         ALOGE("Malformed array: %s", str);
16731         return false;
16732     }
16733     char *endParam = nullptr;
16734     for (int i = 0; i < count; i++) {
16735         dest[i] = strtod(startParam, &endParam);
16736         if (startParam == endParam) {
16737             ALOGE("Malformed array, index %d: %s", i, str);
16738             return false;
16739         }
16740         startParam = endParam + 1;
16741         if (startParam >= str + len) {
16742             ALOGE("Malformed array, index %d: %s", i, str);
16743             return false;
16744         }
16745     }
16746     return true;
16747 }
16748 
// Construct a dispatcher that delivers shutter notifications through
// @p parent's orchestrateNotify(). @p parent must outlive this object —
// presumably the HWI owns the dispatcher; TODO confirm lifetime at call site.
ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}
16751 
expectShutter(uint32_t frameNumber,bool isReprocess,bool isZsl)16752 void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess, bool isZsl)
16753 {
16754     std::lock_guard<std::mutex> lock(mLock);
16755 
16756     if (isReprocess) {
16757         mReprocessShutters.emplace(frameNumber, Shutter());
16758     } else if (isZsl) {
16759         mZslShutters.emplace(frameNumber, Shutter());
16760     } else {
16761         mShutters.emplace(frameNumber, Shutter());
16762     }
16763 }
16764 
markShutterReady(uint32_t frameNumber,uint64_t timestamp)16765 void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
16766 {
16767     std::lock_guard<std::mutex> lock(mLock);
16768 
16769     std::map<uint32_t, Shutter> *shutters = nullptr;
16770 
16771     // Find the shutter entry.
16772     auto shutter = mShutters.find(frameNumber);
16773     if (shutter != mShutters.end()) {
16774         shutters = &mShutters;
16775     } else {
16776         shutter = mReprocessShutters.find(frameNumber);
16777         if (shutter != mReprocessShutters.end()) {
16778             shutters = &mReprocessShutters;
16779         } else {
16780             shutter = mZslShutters.find(frameNumber);
16781             if (shutter != mZslShutters.end()) {
16782                 shutters = &mZslShutters;
16783             } else {
16784                 // Shutter was already sent.
16785                 return;
16786             }
16787         }
16788     }
16789 
16790     if (shutter->second.ready) {
16791         // If shutter is already ready, don't update timestamp again.
16792         return;
16793     }
16794 
16795     // Make this frame's shutter ready.
16796     shutter->second.ready = true;
16797     shutter->second.timestamp = timestamp;
16798 
16799     // Iterate throught the shutters and send out shuters until the one that's not ready yet.
16800     shutter = shutters->begin();
16801     while (shutter != shutters->end()) {
16802         if (!shutter->second.ready) {
16803             // If this shutter is not ready, the following shutters can't be sent.
16804             break;
16805         }
16806 
16807         camera3_notify_msg_t msg = {};
16808         msg.type = CAMERA3_MSG_SHUTTER;
16809         msg.message.shutter.frame_number = shutter->first;
16810         msg.message.shutter.timestamp = shutter->second.timestamp;
16811         mParent->orchestrateNotify(&msg);
16812 
16813         shutter = shutters->erase(shutter);
16814     }
16815 }
16816 
clear(uint32_t frameNumber)16817 void ShutterDispatcher::clear(uint32_t frameNumber)
16818 {
16819     std::lock_guard<std::mutex> lock(mLock);
16820     mShutters.erase(frameNumber);
16821     mReprocessShutters.erase(frameNumber);
16822     mZslShutters.erase(frameNumber);
16823 }
16824 
clear()16825 void ShutterDispatcher::clear()
16826 {
16827     std::lock_guard<std::mutex> lock(mLock);
16828 
16829     // Log errors for stale shutters.
16830     for (auto &shutter : mShutters) {
16831         ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
16832             __FUNCTION__, shutter.first, shutter.second.ready,
16833             shutter.second.timestamp);
16834     }
16835 
16836     // Log errors for stale reprocess shutters.
16837     for (auto &shutter : mReprocessShutters) {
16838         ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
16839             __FUNCTION__, shutter.first, shutter.second.ready,
16840             shutter.second.timestamp);
16841     }
16842 
16843     // Log errors for stale ZSL shutters.
16844     for (auto &shutter : mZslShutters) {
16845         ALOGE("%s: stale zsl shutter: frame number %u, ready %d, timestamp %" PRId64,
16846             __FUNCTION__, shutter.first, shutter.second.ready,
16847             shutter.second.timestamp);
16848     }
16849 
16850     mShutters.clear();
16851     mReprocessShutters.clear();
16852     mZslShutters.clear();
16853 }
16854 
// Construct a dispatcher that returns output buffers through @p parent's
// orchestrateResult(). @p parent must outlive this object — presumably the
// HWI owns the dispatcher; TODO confirm lifetime at call site.
OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}
16857 
configureStreams(camera3_stream_configuration_t * streamList)16858 status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
16859 {
16860     std::lock_guard<std::mutex> lock(mLock);
16861     mStreamBuffers.clear();
16862     if (!streamList) {
16863         ALOGE("%s: streamList is nullptr.", __FUNCTION__);
16864         return -EINVAL;
16865     }
16866 
16867     // Create a "frame-number -> buffer" map for each stream.
16868     for (uint32_t i = 0; i < streamList->num_streams; i++) {
16869         mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
16870     }
16871 
16872     return OK;
16873 }
16874 
expectBuffer(uint32_t frameNumber,camera3_stream_t * stream)16875 status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
16876 {
16877     std::lock_guard<std::mutex> lock(mLock);
16878 
16879     // Find the "frame-number -> buffer" map for the stream.
16880     auto buffers = mStreamBuffers.find(stream);
16881     if (buffers == mStreamBuffers.end()) {
16882         ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
16883         return -EINVAL;
16884     }
16885 
16886     // Create an unready buffer for this frame number.
16887     buffers->second.emplace(frameNumber, Buffer());
16888     return OK;
16889 }
16890 
markBufferReady(uint32_t frameNumber,const camera3_stream_buffer_t & buffer)16891 void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
16892         const camera3_stream_buffer_t &buffer)
16893 {
16894     std::lock_guard<std::mutex> lock(mLock);
16895 
16896     // Find the frame number -> buffer map for the stream.
16897     auto buffers = mStreamBuffers.find(buffer.stream);
16898     if (buffers == mStreamBuffers.end()) {
16899         ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
16900         return;
16901     }
16902 
16903     // Find the unready buffer this frame number and mark it ready.
16904     auto pendingBuffer = buffers->second.find(frameNumber);
16905     if (pendingBuffer == buffers->second.end()) {
16906         ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
16907         return;
16908     }
16909 
16910     pendingBuffer->second.ready = true;
16911     pendingBuffer->second.buffer = buffer;
16912 
16913     // Iterate through the buffers and send out buffers until the one that's not ready yet.
16914     pendingBuffer = buffers->second.begin();
16915     while (pendingBuffer != buffers->second.end()) {
16916         if (!pendingBuffer->second.ready) {
16917             // If this buffer is not ready, the following buffers can't be sent.
16918             break;
16919         }
16920 
16921         camera3_capture_result_t result = {};
16922         result.frame_number = pendingBuffer->first;
16923         result.num_output_buffers = 1;
16924         result.output_buffers = &pendingBuffer->second.buffer;
16925 
16926         // Send out result with buffer errors.
16927         mParent->orchestrateResult(&result);
16928 
16929         pendingBuffer = buffers->second.erase(pendingBuffer);
16930     }
16931 }
16932 
clear(bool clearConfiguredStreams)16933 void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
16934 {
16935     std::lock_guard<std::mutex> lock(mLock);
16936 
16937     // Log errors for stale buffers.
16938     for (auto &buffers : mStreamBuffers) {
16939         for (auto &buffer : buffers.second) {
16940             ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
16941                 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
16942         }
16943         buffers.second.clear();
16944     }
16945 
16946     if (clearConfiguredStreams) {
16947         mStreamBuffers.clear();
16948     }
16949 }
16950 
16951 }; //end namespace qcamera
16952