• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 *     * Redistributions of source code must retain the above copyright
7 *       notice, this list of conditions and the following disclaimer.
8 *     * Redistributions in binary form must reproduce the above
9 *       copyright notice, this list of conditions and the following
10 *       disclaimer in the documentation and/or other materials provided
11 *       with the distribution.
12 *     * Neither the name of The Linux Foundation nor the names of its
13 *       contributors may be used to endorse or promote products derived
14 *       from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29 
30 #define LOG_TAG "QCamera3HWI"
31 //#define LOG_NDEBUG 0
32 
33 #define __STDC_LIMIT_MACROS
34 
35 // To remove
36 #include <cutils/properties.h>
37 
38 // System dependencies
39 #include <dlfcn.h>
40 #include <fcntl.h>
41 #include <stdio.h>
42 #include <stdlib.h>
43 #include "utils/Timers.h"
44 #include "sys/ioctl.h"
45 #include <time.h>
46 #include <sync/sync.h>
47 #include "gralloc_priv.h"
48 #include <map>
49 
50 // Display dependencies
51 #include "qdMetaData.h"
52 
53 // Camera dependencies
54 #include "android/QCamera3External.h"
55 #include "util/QCameraFlash.h"
56 #include "QCamera3HWI.h"
57 #include "QCamera3VendorTags.h"
58 #include "QCameraTrace.h"
59 
60 #include "HdrPlusClientUtils.h"
61 
62 extern "C" {
63 #include "mm_camera_dbg.h"
64 }
65 #include "cam_cond.h"
66 
67 using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
68 using namespace android;
69 
70 namespace qcamera {
71 
// Convenience accessor for a buffer pointer inside a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
// mm_camera has 2 partial results: 3A, and final result.
// HDR+ requests have 3 partial results: postview, next request ready, and final result.
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY     0

// Maximum representable pixel value at each supported sensor bit depth.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// 4K UHD video dimensions.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest resolution for which EIS (electronic image stabilization) applies.
#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

// Per-type stream count limits used when validating a stream configuration.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
// Values per region tuple: left, top, right, bottom, weight (see FACE_* below).
#define REGIONS_TUPLE_COUNT    5
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
// Flush timeout; same unit (seconds) as the buffer timeouts above.
#define FLUSH_TIMEOUT 3
// Number of entries in a statically-sized map table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Superset of backend post-processing features requested for HAL3 streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

// Sentinel for waits that should never time out.
#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT              0
#define FACE_TOP               1
#define FACE_RIGHT             2
#define FACE_BOTTOM            3
#define FACE_WEIGHT            4

/* Face landmarks indices */
#define LEFT_EYE_X             0
#define LEFT_EYE_Y             1
#define RIGHT_EYE_X            2
#define RIGHT_EYE_Y            3
#define MOUTH_X                4
#define MOUTH_Y                5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT
139 
// Per-sensor capability table and cached static metadata (filled in elsewhere).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
// Runtime-adjustable HAL log level; volatile because it may be updated while read.
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
// NOTE(review): name breaks the "g" prefix convention used by the neighboring
// globals; kept as-is since other code outside this view may reference it.
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.
160 
161 
// "On"/"Off"/"Auto" property strings -> backend CDS (chroma down-sampling) modes.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

// Vendor video-HDR tag values -> backend video HDR modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

// Vendor binning-correction tag values -> backend binning correction modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

// Vendor IR mode tag values -> backend IR modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};
188 
// android.control.effectMode -> backend effect modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// android.control.awbMode -> backend white-balance modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// android.control.sceneMode -> backend scene modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

// android.control.afMode -> backend focus modes. AF_MODE_OFF appears twice
// (also paired with CAM_FOCUS_MODE_FIXED) so that both backend modes report
// back as OFF; ordering matters for reverse lookup — presumably the first
// match wins, as documented for REFERENCE_ILLUMINANT_MAP below.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// android.colorCorrection.aberrationMode -> backend CAC modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};
261 
// android.control.aeAntibandingMode -> backend antibanding modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// android.control.aeMode -> flash mode implied by that AE mode (plain ON and
// the Nexus experimental external-flash mode keep the flash off).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { (camera_metadata_enum_android_control_ae_mode_t)
      NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

// android.flash.mode -> backend flash modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// android.statistics.faceDetectMode -> backend face-detect modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// android.lens.info.focusDistanceCalibration -> backend calibration levels.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// android.lens.state -> backend AF lens states.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};

// Flat (width, height) pairs advertised as JPEG thumbnail sizes; the leading
// 0x0 entry means "no thumbnail", as required by the camera2 API.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

// android.sensor.testPatternMode -> backend test patterns.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};
336 
/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested frame rate (fps) -> backend high-frame-rate mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

// Vendor instant-AEC tag values -> backend AEC convergence types.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

// Vendor exposure-metering tag values -> backend auto-exposure modes.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

// Vendor ISO tag values -> backend ISO modes.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};
407 
// HAL3 device-ops vtable handed to the framework. Entries this HAL does not
// implement (register_stream_buffers, get_metadata_vendor_tag_ops) are NULL,
// which is legal for camera device API >= 3.2.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
// NOTE(review): 0xDEADBEEF acts as the "no session" sentinel; array is sized
// by its declaration elsewhere — presumably one slot per sensor, confirm.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
422 
logEaselEvent(const char * tag,const char * event)423 static inline void logEaselEvent(const char *tag, const char *event) {
424     if (CC_UNLIKELY(gEaselProfilingEnabled)) {
425         struct timespec ts = {};
426         static int64_t kMsPerSec = 1000;
427         static int64_t kNsPerMs = 1000000;
428         status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
429         if (res != OK) {
430             ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
431         } else {
432             int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
433             ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
434         }
435     }
436 }
437 
438 /*===========================================================================
439  * FUNCTION   : QCamera3HardwareInterface
440  *
441  * DESCRIPTION: constructor of QCamera3HardwareInterface
442  *
443  * PARAMETERS :
444  *   @cameraId  : camera ID
445  *
446  * RETURN     : none
447  *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      // All channels start NULL; they are created during stream configuration.
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_bAVTimerEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mLastRequestedFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mEaselMipiStarted(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger(),
      mSceneDistance(-1)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    // Populate the camera3_device_t handed back to the framework.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Clear the cached per-template default request settings.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));
    mEaselFwUpdated = false;

    // TNR (temporal noise reduction) defaults: off for preview, on for
    // software-TNR preview and for video, unless overridden by property.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    // Default to 64-byte stride padding; when CHECK_GPU_PIXEL_ALIGNMENT is
    // defined, query the Adreno library for the actual GPU alignment.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }
#endif
    // PD (phase-detection) stats support: a non-negative index means supported.
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    // NOTE(review): presumably used to pick the antibanding default
    // (50 vs 60 Hz mains) — confirm at use site.
    m60HzZone = is60HzZone();
}
620 
621 /*===========================================================================
622  * FUNCTION   : ~QCamera3HardwareInterface
623  *
624  * DESCRIPTION: destructor of QCamera3HardwareInterface
625  *
626  * PARAMETERS : none
627  *
628  * RETURN     : none
629  *==========================================================================*/
~QCamera3HardwareInterface()630 QCamera3HardwareInterface::~QCamera3HardwareInterface()
631 {
632     LOGD("E");
633 
634     int32_t rc = 0;
635 
636     // Disable power hint and enable the perf lock for close camera
637     mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
638     mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
639 
640     // Close HDR+ client first before destroying HAL.
641     {
642         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
643         finishHdrPlusClientOpeningLocked(l);
644         if (gHdrPlusClient != nullptr) {
645             // Disable HDR+ mode.
646             disableHdrPlusModeLocked();
647             // Disconnect Easel if it's connected.
648             gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
649             gHdrPlusClient = nullptr;
650         }
651     }
652 
653     // unlink of dualcam during close camera
654     if (mIsDeviceLinked) {
655         cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
656                 &m_pDualCamCmdPtr->bundle_info;
657         m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
658         m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
659         pthread_mutex_lock(&gCamLock);
660 
661         if (mIsMainCamera == 1) {
662             m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
663             m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
664             m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
665             // related session id should be session id of linked session
666             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
667         } else {
668             m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
669             m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
670             m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
671             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
672         }
673         m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
674         pthread_mutex_unlock(&gCamLock);
675 
676         rc = mCameraHandle->ops->set_dual_cam_cmd(
677                 mCameraHandle->camera_handle);
678         if (rc < 0) {
679             LOGE("Dualcam: Unlink failed, but still proceed to close");
680         }
681     }
682 
683     /* We need to stop all streams before deleting any stream */
684     if (mRawDumpChannel) {
685         mRawDumpChannel->stop();
686     }
687 
688     if (mHdrPlusRawSrcChannel) {
689         mHdrPlusRawSrcChannel->stop();
690     }
691 
692     // NOTE: 'camera3_stream_t *' objects are already freed at
693     //        this stage by the framework
694     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
695         it != mStreamInfo.end(); it++) {
696         QCamera3ProcessingChannel *channel = (*it)->channel;
697         if (channel) {
698             channel->stop();
699         }
700     }
701     if (mSupportChannel)
702         mSupportChannel->stop();
703 
704     if (mAnalysisChannel) {
705         mAnalysisChannel->stop();
706     }
707     if (mMetadataChannel) {
708         mMetadataChannel->stop();
709     }
710     if (mChannelHandle) {
711         stopChannelLocked(/*stop_immediately*/false);
712     }
713 
714     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
715         it != mStreamInfo.end(); it++) {
716         QCamera3ProcessingChannel *channel = (*it)->channel;
717         if (channel)
718             delete channel;
719         free (*it);
720     }
721     if (mSupportChannel) {
722         delete mSupportChannel;
723         mSupportChannel = NULL;
724     }
725 
726     if (mAnalysisChannel) {
727         delete mAnalysisChannel;
728         mAnalysisChannel = NULL;
729     }
730     if (mRawDumpChannel) {
731         delete mRawDumpChannel;
732         mRawDumpChannel = NULL;
733     }
734     if (mHdrPlusRawSrcChannel) {
735         delete mHdrPlusRawSrcChannel;
736         mHdrPlusRawSrcChannel = NULL;
737     }
738     if (mDummyBatchChannel) {
739         delete mDummyBatchChannel;
740         mDummyBatchChannel = NULL;
741     }
742 
743     mPictureChannel = NULL;
744     mDepthChannel = NULL;
745 
746     if (mMetadataChannel) {
747         delete mMetadataChannel;
748         mMetadataChannel = NULL;
749     }
750 
751     /* Clean up all channels */
752     if (mCameraInitialized) {
753         if(!mFirstConfiguration){
754             //send the last unconfigure
755             cam_stream_size_info_t stream_config_info;
756             memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
757             stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
758             stream_config_info.buffer_info.max_buffers =
759                     m_bIs4KVideo ? 0 :
760                     m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
761             clear_metadata_buffer(mParameters);
762             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
763                     stream_config_info);
764             int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
765             if (rc < 0) {
766                 LOGE("set_parms failed for unconfigure");
767             }
768         }
769         deinitParameters();
770     }
771 
772     if (mChannelHandle) {
773         mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
774                 mChannelHandle);
775         LOGH("deleting channel %d", mChannelHandle);
776         mChannelHandle = 0;
777     }
778 
779     if (mState != CLOSED)
780         closeCamera();
781 
782     for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
783         req.mPendingBufferList.clear();
784     }
785     mPendingBuffersMap.mPendingBuffersInRequest.clear();
786     for (pendingRequestIterator i = mPendingRequestsList.begin();
787             i != mPendingRequestsList.end();) {
788         i = erasePendingRequest(i);
789     }
790     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
791         if (mDefaultMetadata[i])
792             free_camera_metadata(mDefaultMetadata[i]);
793 
794     mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
795 
796     pthread_cond_destroy(&mRequestCond);
797 
798     pthread_cond_destroy(&mBuffersCond);
799 
800     pthread_mutex_destroy(&mMutex);
801     LOGD("X");
802 }
803 
804 /*===========================================================================
805  * FUNCTION   : erasePendingRequest
806  *
807  * DESCRIPTION: function to erase a desired pending request after freeing any
808  *              allocated memory
809  *
810  * PARAMETERS :
811  *   @i       : iterator pointing to pending request to be erased
812  *
813  * RETURN     : iterator pointing to the next request
814  *==========================================================================*/
815 QCamera3HardwareInterface::pendingRequestIterator
erasePendingRequest(pendingRequestIterator i)816         QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
817 {
818     if (i->input_buffer != NULL) {
819         free(i->input_buffer);
820         i->input_buffer = NULL;
821     }
822     if (i->settings != NULL)
823         free_camera_metadata((camera_metadata_t*)i->settings);
824 
825     mExpectedInflightDuration -= i->expectedFrameDuration;
826     if (mExpectedInflightDuration < 0) {
827         LOGE("Negative expected in-flight duration!");
828         mExpectedInflightDuration = 0;
829     }
830 
831     return mPendingRequestsList.erase(i);
832 }
833 
834 /*===========================================================================
835  * FUNCTION   : camEvtHandle
836  *
837  * DESCRIPTION: Function registered to mm-camera-interface to handle events
838  *
839  * PARAMETERS :
840  *   @camera_handle : interface layer camera handle
841  *   @evt           : ptr to event
842  *   @user_data     : user data ptr
843  *
844  * RETURN     : none
845  *==========================================================================*/
camEvtHandle(uint32_t,mm_camera_event_t * evt,void * user_data)846 void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
847                                           mm_camera_event_t *evt,
848                                           void *user_data)
849 {
850     QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
851     if (obj && evt) {
852         switch(evt->server_event_type) {
853             case CAM_EVENT_TYPE_DAEMON_DIED:
854                 pthread_mutex_lock(&obj->mMutex);
855                 obj->mState = ERROR;
856                 pthread_mutex_unlock(&obj->mMutex);
857                 LOGE("Fatal, camera daemon died");
858                 break;
859 
860             case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
861                 LOGD("HAL got request pull from Daemon");
862                 pthread_mutex_lock(&obj->mMutex);
863                 obj->mWokenUpByDaemon = true;
864                 obj->unblockRequestIfNecessary();
865                 pthread_mutex_unlock(&obj->mMutex);
866                 break;
867 
868             default:
869                 LOGW("Warning: Unhandled event %d",
870                         evt->server_event_type);
871                 break;
872         }
873     } else {
874         LOGE("NULL user_data/evt");
875     }
876 }
877 
878 /*===========================================================================
879  * FUNCTION   : openCamera
880  *
881  * DESCRIPTION: open camera
882  *
883  * PARAMETERS :
884  *   @hw_device  : double ptr for camera device struct
885  *
886  * RETURN     : int32_t type of status
887  *              NO_ERROR  -- success
888 *              non-zero failure code
889  *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    // Reject reopen attempts: only a CLOSED camera may be opened.
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    // Boost CPU/GPU clocks for the duration of camera open (KPI path).
    // NOTE(review): the early error returns below (ALREADY_EXISTS, Easel
    // resume failure) leave PERF_LOCK_OPEN_CAMERA held — presumably the
    // PerfLockMgr releases it on timeout; confirm.
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
             mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    // Resume Easel before opening the camera, under gHdrPlusClientLock.
    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
            mEaselFwUpdated = false;
        }
    }

    // Delegate the actual open to the parameterless overload.
    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
             mCameraId, rc);

    // Transition to OPENED only on full success.
    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}
949 
950 /*===========================================================================
951  * FUNCTION   : openCamera
952  *
953  * DESCRIPTION: open camera
954  *
955  * PARAMETERS : none
956  *
957  * RETURN     : int32_t type of status
958  *              NO_ERROR  -- success
959  *              none-zero failure code
960  *==========================================================================*/
openCamera()961 int QCamera3HardwareInterface::openCamera()
962 {
963     int rc = 0;
964     char value[PROPERTY_VALUE_MAX];
965 
966     KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
967 
968     rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
969     if (rc < 0) {
970         LOGE("Failed to reserve flash for camera id: %d",
971                 mCameraId);
972         return UNKNOWN_ERROR;
973     }
974 
975     rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
976     if (rc) {
977         LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
978         return rc;
979     }
980 
981     if (!mCameraHandle) {
982         LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
983         return -ENODEV;
984     }
985 
986     rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
987             camEvtHandle, (void *)this);
988 
989     if (rc < 0) {
990         LOGE("Error, failed to register event callback");
991         /* Not closing camera here since it is already handled in destructor */
992         return FAILED_TRANSACTION;
993     }
994 
995     mExifParams.debug_params =
996             (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
997     if (mExifParams.debug_params) {
998         memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
999     } else {
1000         LOGE("Out of Memory. Allocation failed for 3A debug exif params");
1001         return NO_MEMORY;
1002     }
1003     mFirstConfiguration = true;
1004 
1005     //Notify display HAL that a camera session is active.
1006     //But avoid calling the same during bootup because camera service might open/close
1007     //cameras at boot time during its initialization and display service will also internally
1008     //wait for camera service to initialize first while calling this display API, resulting in a
1009     //deadlock situation. Since boot time camera open/close calls are made only to fetch
1010     //capabilities, no need of this display bw optimization.
1011     //Use "service.bootanim.exit" property to know boot status.
1012     property_get("service.bootanim.exit", value, "0");
1013     if (atoi(value) == 1) {
1014         pthread_mutex_lock(&gCamLock);
1015         if (gNumCameraSessions++ == 0) {
1016             setCameraLaunchStatus(true);
1017         }
1018         pthread_mutex_unlock(&gCamLock);
1019     }
1020 
1021     //fill the session id needed while linking dual cam
1022     pthread_mutex_lock(&gCamLock);
1023     rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
1024         &sessionId[mCameraId]);
1025     pthread_mutex_unlock(&gCamLock);
1026 
1027     if (rc < 0) {
1028         LOGE("Error, failed to get sessiion id");
1029         return UNKNOWN_ERROR;
1030     } else {
1031         //Allocate related cam sync buffer
1032         //this is needed for the payload that goes along with bundling cmd for related
1033         //camera use cases
1034         m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
1035         rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
1036         if(rc != OK) {
1037             rc = NO_MEMORY;
1038             LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
1039             return NO_MEMORY;
1040         }
1041 
1042         //Map memory for related cam sync buffer
1043         rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
1044                 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
1045                 m_pDualCamCmdHeap->getFd(0),
1046                 sizeof(cam_dual_camera_cmd_info_t),
1047                 m_pDualCamCmdHeap->getPtr(0));
1048         if(rc < 0) {
1049             LOGE("Dualcam: failed to map Related cam sync buffer");
1050             rc = FAILED_TRANSACTION;
1051             return NO_MEMORY;
1052         }
1053         m_pDualCamCmdPtr =
1054                 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
1055     }
1056 
1057     LOGH("mCameraId=%d",mCameraId);
1058 
1059     return NO_ERROR;
1060 }
1061 
1062 /*===========================================================================
1063  * FUNCTION   : closeCamera
1064  *
1065  * DESCRIPTION: close camera
1066  *
1067  * PARAMETERS : none
1068  *
1069  * RETURN     : int32_t type of status
1070  *              NO_ERROR  -- success
1071  *              none-zero failure code
1072  *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
             mCameraId);

    // unmap memory for related cam sync buffer
    // NOTE(review): mCameraHandle is dereferenced without a NULL check —
    // presumably closeCamera() is only reached after a successful open;
    // confirm callers guarantee this.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        // Last session closing clears the global camera-launch status.
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // Free the 3A debug EXIF scratch buffer allocated in openCamera().
    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    // Release the flash reservation taken in openCamera(); failure is only a
    // warning since the camera itself closed.
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
         mCameraId, rc);

    // Suspend Easel after the camera is fully closed, under the HDR+ lock.
    // NOTE(review): rc from close_camera is overwritten here by the suspend
    // result — presumably intentional so the caller sees the last failure;
    // confirm.
    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}
1136 
1137 /*===========================================================================
1138  * FUNCTION   : initialize
1139  *
1140  * DESCRIPTION: Initialize frameworks callback functions
1141  *
1142  * PARAMETERS :
1143  *   @callback_ops : callback function to frameworks
1144  *
1145  * RETURN     :
1146  *
1147  *==========================================================================*/
initialize(const struct camera3_callback_ops * callback_ops)1148 int QCamera3HardwareInterface::initialize(
1149         const struct camera3_callback_ops *callback_ops)
1150 {
1151     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
1152     int rc;
1153 
1154     LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1155     pthread_mutex_lock(&mMutex);
1156 
1157     // Validate current state
1158     switch (mState) {
1159         case OPENED:
1160             /* valid state */
1161             break;
1162         default:
1163             LOGE("Invalid state %d", mState);
1164             rc = -ENODEV;
1165             goto err1;
1166     }
1167 
1168     rc = initParameters();
1169     if (rc < 0) {
1170         LOGE("initParamters failed %d", rc);
1171         goto err1;
1172     }
1173     mCallbackOps = callback_ops;
1174 
1175     mChannelHandle = mCameraHandle->ops->add_channel(
1176             mCameraHandle->camera_handle, NULL, NULL, this);
1177     if (mChannelHandle == 0) {
1178         LOGE("add_channel failed");
1179         rc = -ENOMEM;
1180         pthread_mutex_unlock(&mMutex);
1181         return rc;
1182     }
1183 
1184     pthread_mutex_unlock(&mMutex);
1185     mCameraInitialized = true;
1186     mState = INITIALIZED;
1187     LOGI("X");
1188     return 0;
1189 
1190 err1:
1191     pthread_mutex_unlock(&mMutex);
1192     return rc;
1193 }
1194 
1195 /*===========================================================================
1196  * FUNCTION   : validateStreamDimensions
1197  *
1198  * DESCRIPTION: Check if the configuration requested are those advertised
1199  *
1200  * PARAMETERS :
1201  *   @stream_list : streams to be configured
1202  *
1203  * RETURN     :
1204  *
1205  *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    // PDAF depth stream dimensions come from the capability table when
    // phase-detection is supported.
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270-degree rotation the buffer is validated against the
        // swapped (pre-rotation) dimensions.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW16 with DEPTH dataspace is the PDAF stream: it must match
            // the capability's depth dimensions exactly (no rotation swap).
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            // Other raw formats are validated against the supported raw
            // dimension table.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            // BLOB with DEPTH dataspace is a depth point cloud, expressed as
            // a 1-row buffer of (samples/16) points.
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL/bidirectional/input streams may match the full active
            // array size in addition to the picture size table below.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1342 
1343 /*===========================================================================
1344  * FUNCTION   : validateUsageFlags
1345  *
1346  * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1347  *
1348  * PARAMETERS :
1349  *   @stream_list : streams to be configured
1350  *
1351  * RETURN     :
1352  *   NO_ERROR if the usage flags are supported
1353  *   error code if usage flags are not supported
1354  *
1355  *==========================================================================*/
validateUsageFlags(const camera3_stream_configuration_t * streamList)1356 int QCamera3HardwareInterface::validateUsageFlags(
1357         const camera3_stream_configuration_t* streamList)
1358 {
1359     for (size_t j = 0; j < streamList->num_streams; j++) {
1360         const camera3_stream_t *newStream = streamList->streams[j];
1361 
1362         if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1363             (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1364              newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1365             continue;
1366         }
1367 
1368         // Here we only care whether it's EIS3 or not
1369         char is_type_value[PROPERTY_VALUE_MAX];
1370         property_get("persist.camera.is_type", is_type_value, "4");
1371         cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1372         if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1373                 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1374             isType = IS_TYPE_NONE;
1375 
1376         bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1377         bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1378         bool isZSL = IS_USAGE_ZSL(newStream->usage);
1379         bool forcePreviewUBWC = true;
1380         if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1381             forcePreviewUBWC = false;
1382         }
1383         cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1384                 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
1385         cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1386                 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
1387         cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1388                 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
1389 
1390         // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1391         // So color spaces will always match.
1392 
1393         // Check whether underlying formats of shared streams match.
1394         if (isVideo && isPreview && videoFormat != previewFormat) {
1395             LOGE("Combined video and preview usage flag is not supported");
1396             return -EINVAL;
1397         }
1398         if (isPreview && isZSL && previewFormat != zslFormat) {
1399             LOGE("Combined preview and zsl usage flag is not supported");
1400             return -EINVAL;
1401         }
1402         if (isVideo && isZSL && videoFormat != zslFormat) {
1403             LOGE("Combined video and zsl usage flag is not supported");
1404             return -EINVAL;
1405         }
1406     }
1407     return NO_ERROR;
1408 }
1409 
1410 /*===========================================================================
1411  * FUNCTION   : validateUsageFlagsForEis
1412  *
1413  * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1414  *
1415  * PARAMETERS :
1416  *   @stream_list : streams to be configured
1417  *
1418  * RETURN     :
1419  *   NO_ERROR if the usage flags are supported
1420  *   error code if usage flags are not supported
1421  *
1422  *==========================================================================*/
validateUsageFlagsForEis(const camera3_stream_configuration_t * streamList)1423 int QCamera3HardwareInterface::validateUsageFlagsForEis(
1424         const camera3_stream_configuration_t* streamList)
1425 {
1426     for (size_t j = 0; j < streamList->num_streams; j++) {
1427         const camera3_stream_t *newStream = streamList->streams[j];
1428 
1429         bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1430         bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1431 
1432         // Because EIS is "hard-coded" for certain use case, and current
1433        // implementation doesn't support shared preview and video on the same
1434         // stream, return failure if EIS is forced on.
1435         if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1436             LOGE("Combined video and preview usage flag is not supported due to EIS");
1437             return -EINVAL;
1438         }
1439     }
1440     return NO_ERROR;
1441 }
1442 
1443 /*==============================================================================
1444  * FUNCTION   : isSupportChannelNeeded
1445  *
1446  * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1447  *
1448  * PARAMETERS :
1449  *   @stream_list : streams to be configured
1450  *   @stream_config_info : the config info for streams to be configured
1451  *
1452  * RETURN     : Boolean true/false decision
1453  *
1454  *==========================================================================*/
isSupportChannelNeeded(camera3_stream_configuration_t * streamList,cam_stream_size_info_t stream_config_info)1455 bool QCamera3HardwareInterface::isSupportChannelNeeded(
1456         camera3_stream_configuration_t *streamList,
1457         cam_stream_size_info_t stream_config_info)
1458 {
1459     uint32_t i;
1460     bool pprocRequested = false;
1461     /* Check for conditions where PProc pipeline does not have any streams*/
1462     for (i = 0; i < stream_config_info.num_streams; i++) {
1463         if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1464                 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1465             pprocRequested = true;
1466             break;
1467         }
1468     }
1469 
1470     if (pprocRequested == false )
1471         return true;
1472 
1473     /* Dummy stream needed if only raw or jpeg streams present */
1474     for (i = 0; i < streamList->num_streams; i++) {
1475         switch(streamList->streams[i]->format) {
1476             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1477             case HAL_PIXEL_FORMAT_RAW10:
1478             case HAL_PIXEL_FORMAT_RAW16:
1479             case HAL_PIXEL_FORMAT_BLOB:
1480                 break;
1481             default:
1482                 return false;
1483         }
1484     }
1485     return true;
1486 }
1487 
1488 /*==============================================================================
1489  * FUNCTION   : sensor_mode_info
1490  *
 * DESCRIPTION: Get sensor mode information based on current stream configuration
1492  *
1493  * PARAMETERS :
1494  *   @sensor_mode_info : sensor mode information (output)
1495  *
1496  * RETURN     : int32_t type of status
1497  *              NO_ERROR  -- success
 *              non-zero failure code
1499  *
1500  *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
{
    int32_t rc = NO_ERROR;

    // Compute the bounding dimensions across all configured streams; the
    // backend uses this to pick a sensor mode large enough for every stream.
    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    // Push the max dimension to the backend first so that the subsequent
    // sensor-mode query reflects this stream configuration.
    clear_metadata_buffer(mParameters);

    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    // Re-use the same parameter buffer for the query; clear stale entries
    // from the set-parms round trip before adding the GET request.
    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
        return rc;
    }

    // Copy the queried mode info into the caller's output parameter.
    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
    LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
            "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
            sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
            sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
            sensorModeInfo.num_raw_bits);

    return rc;
}
1547 
1548 /*==============================================================================
1549  * FUNCTION   : getCurrentSensorModeInfo
1550  *
1551  * DESCRIPTION: Get sensor mode information that is currently selected.
1552  *
1553  * PARAMETERS :
1554  *   @sensorModeInfo : sensor mode information (output)
1555  *
1556  * RETURN     : int32_t type of status
1557  *              NO_ERROR  -- success
 *              non-zero failure code
1559  *
1560  *==========================================================================*/
getCurrentSensorModeInfo(cam_sensor_mode_info_t & sensorModeInfo)1561 int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1562 {
1563     int32_t rc = NO_ERROR;
1564 
1565     clear_metadata_buffer(mParameters);
1566     ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1567 
1568     rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1569             mParameters);
1570     if (rc != NO_ERROR) {
1571         LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
1572         return rc;
1573     }
1574 
1575     READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1576     LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1577             "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1578             sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1579             sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1580             sensorModeInfo.num_raw_bits);
1581 
1582     return rc;
1583 }
1584 
1585 /*==============================================================================
1586  * FUNCTION   : addToPPFeatureMask
1587  *
1588  * DESCRIPTION: add additional features to pp feature mask based on
1589  *              stream type and usecase
1590  *
1591  * PARAMETERS :
1592  *   @stream_format : stream type for feature mask
1593  *   @stream_idx : stream idx within postprocess_mask list to change
1594  *
1595  * RETURN     : NULL
1596  *
1597  *==========================================================================*/
addToPPFeatureMask(int stream_format,uint32_t stream_idx)1598 void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1599         uint32_t stream_idx)
1600 {
1601     char feature_mask_value[PROPERTY_VALUE_MAX];
1602     cam_feature_mask_t feature_mask;
1603     int args_converted;
1604     int property_len;
1605 
1606     /* Get feature mask from property */
1607 #ifdef _LE_CAMERA_
1608     char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1609     snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1610     property_len = property_get("persist.camera.hal3.feature",
1611             feature_mask_value, swtnr_feature_mask_value);
1612 #else
1613     property_len = property_get("persist.camera.hal3.feature",
1614             feature_mask_value, "0");
1615 #endif
1616     if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1617             (feature_mask_value[1] == 'x')) {
1618         args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1619     } else {
1620         args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1621     }
1622     if (1 != args_converted) {
1623         feature_mask = 0;
1624         LOGE("Wrong feature mask %s", feature_mask_value);
1625         return;
1626     }
1627 
1628     switch (stream_format) {
1629     case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1630         /* Add LLVD to pp feature mask only if video hint is enabled */
1631         if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1632             mStreamConfigInfo.postprocess_mask[stream_idx]
1633                     |= CAM_QTI_FEATURE_SW_TNR;
1634             LOGH("Added SW TNR to pp feature mask");
1635         } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1636             mStreamConfigInfo.postprocess_mask[stream_idx]
1637                     |= CAM_QCOM_FEATURE_LLVD;
1638             LOGH("Added LLVD SeeMore to pp feature mask");
1639         }
1640         if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1641                 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1642             mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1643         }
1644         if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1645                 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1646             mStreamConfigInfo.postprocess_mask[stream_idx] |=
1647                     CAM_QTI_FEATURE_BINNING_CORRECTION;
1648         }
1649         break;
1650     }
1651     default:
1652         break;
1653     }
1654     LOGD("PP feature mask %llx",
1655             mStreamConfigInfo.postprocess_mask[stream_idx]);
1656 }
1657 
1658 /*==============================================================================
1659  * FUNCTION   : updateFpsInPreviewBuffer
1660  *
1661  * DESCRIPTION: update FPS information in preview buffer.
1662  *
1663  * PARAMETERS :
1664  *   @metadata    : pointer to metadata buffer
1665  *   @frame_number: frame_number to look for in pending buffer list
1666  *
1667  * RETURN     : None
1668  *
1669  *==========================================================================*/
updateFpsInPreviewBuffer(metadata_buffer_t * metadata,uint32_t frame_number)1670 void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1671         uint32_t frame_number)
1672 {
1673     // Mark all pending buffers for this particular request
1674     // with corresponding framerate information
1675     for (List<PendingBuffersInRequest>::iterator req =
1676             mPendingBuffersMap.mPendingBuffersInRequest.begin();
1677             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1678         for(List<PendingBufferInfo>::iterator j =
1679                 req->mPendingBufferList.begin();
1680                 j != req->mPendingBufferList.end(); j++) {
1681             QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1682             if ((req->frame_number == frame_number) &&
1683                 (channel->getStreamTypeMask() &
1684                 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1685                 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1686                     CAM_INTF_PARM_FPS_RANGE, metadata) {
1687                     typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1688                     struct private_handle_t *priv_handle =
1689                         (struct private_handle_t *)(*(j->buffer));
1690                     setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1691                 }
1692             }
1693         }
1694     }
1695 }
1696 
1697 /*==============================================================================
1698  * FUNCTION   : updateTimeStampInPendingBuffers
1699  *
1700  * DESCRIPTION: update timestamp in display metadata for all pending buffers
1701  *              of a frame number
1702  *
1703  * PARAMETERS :
1704  *   @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1705  *   @timestamp   : timestamp to be set
1706  *
1707  * RETURN     : None
1708  *
1709  *==========================================================================*/
updateTimeStampInPendingBuffers(uint32_t frameNumber,nsecs_t timestamp)1710 void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1711         uint32_t frameNumber, nsecs_t timestamp)
1712 {
1713     for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1714             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1715         // WAR: save the av_timestamp to the next frame
1716         if(req->frame_number == frameNumber + 1) {
1717             req->av_timestamp = timestamp;
1718         }
1719 
1720         if (req->frame_number != frameNumber)
1721             continue;
1722 
1723         for (auto k = req->mPendingBufferList.begin();
1724                 k != req->mPendingBufferList.end(); k++ ) {
1725             // WAR: update timestamp when it's not VT usecase
1726             QCamera3Channel *channel = (QCamera3Channel *)k->stream->priv;
1727             if (!((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
1728                 m_bAVTimerEnabled)) {
1729                     struct private_handle_t *priv_handle =
1730                         (struct private_handle_t *) (*(k->buffer));
1731                     setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1732             }
1733         }
1734     }
1735     return;
1736 }
1737 
1738 /*===========================================================================
1739  * FUNCTION   : configureStreams
1740  *
1741  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1742  *              and output streams.
1743  *
1744  * PARAMETERS :
1745  *   @stream_list : streams to be configured
1746  *
1747  * RETURN     :
1748  *
1749  *==========================================================================*/
configureStreams(camera3_stream_configuration_t * streamList)1750 int QCamera3HardwareInterface::configureStreams(
1751         camera3_stream_configuration_t *streamList)
1752 {
1753     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
1754     int rc = 0;
1755 
1756     // Acquire perfLock before configure streams
1757     mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
1758     rc = configureStreamsPerfLocked(streamList);
1759     mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
1760 
1761     return rc;
1762 }
1763 
1764 /*===========================================================================
1765  * FUNCTION   : configureStreamsPerfLocked
1766  *
1767  * DESCRIPTION: configureStreams while perfLock is held.
1768  *
1769  * PARAMETERS :
1770  *   @stream_list : streams to be configured
1771  *
1772  * RETURN     : int32_t type of status
1773  *              NO_ERROR  -- success
 *              non-zero failure code
1775  *==========================================================================*/
configureStreamsPerfLocked(camera3_stream_configuration_t * streamList)1776 int QCamera3HardwareInterface::configureStreamsPerfLocked(
1777         camera3_stream_configuration_t *streamList)
1778 {
1779     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
1780     int rc = 0;
1781 
1782     // Sanity check stream_list
1783     if (streamList == NULL) {
1784         LOGE("NULL stream configuration");
1785         return BAD_VALUE;
1786     }
1787     if (streamList->streams == NULL) {
1788         LOGE("NULL stream list");
1789         return BAD_VALUE;
1790     }
1791 
1792     if (streamList->num_streams < 1) {
1793         LOGE("Bad number of streams requested: %d",
1794                 streamList->num_streams);
1795         return BAD_VALUE;
1796     }
1797 
1798     if (streamList->num_streams >= MAX_NUM_STREAMS) {
1799         LOGE("Maximum number of streams %d exceeded: %d",
1800                 MAX_NUM_STREAMS, streamList->num_streams);
1801         return BAD_VALUE;
1802     }
1803 
1804     mOpMode = streamList->operation_mode;
1805     LOGD("mOpMode: %d", mOpMode);
1806 
1807     rc = validateUsageFlags(streamList);
1808     if (rc != NO_ERROR) {
1809         return rc;
1810     }
1811 
1812     // Disable HDR+ if it's enabled;
1813     {
1814         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
1815         finishHdrPlusClientOpeningLocked(l);
1816         disableHdrPlusModeLocked();
1817     }
1818 
1819     /* first invalidate all the steams in the mStreamList
1820      * if they appear again, they will be validated */
1821     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1822             it != mStreamInfo.end(); it++) {
1823         QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1824         if (channel) {
1825           channel->stop();
1826         }
1827         (*it)->status = INVALID;
1828     }
1829 
1830     if (mRawDumpChannel) {
1831         mRawDumpChannel->stop();
1832         delete mRawDumpChannel;
1833         mRawDumpChannel = NULL;
1834     }
1835 
1836     if (mHdrPlusRawSrcChannel) {
1837         mHdrPlusRawSrcChannel->stop();
1838         delete mHdrPlusRawSrcChannel;
1839         mHdrPlusRawSrcChannel = NULL;
1840     }
1841 
1842     if (mSupportChannel)
1843         mSupportChannel->stop();
1844 
1845     if (mAnalysisChannel) {
1846         mAnalysisChannel->stop();
1847     }
1848     if (mMetadataChannel) {
1849         /* If content of mStreamInfo is not 0, there is metadata stream */
1850         mMetadataChannel->stop();
1851     }
1852     if (mChannelHandle) {
1853         stopChannelLocked(/*stop_immediately*/false);
1854     }
1855 
1856     pthread_mutex_lock(&mMutex);
1857 
1858     mPictureChannel = NULL;
1859 
1860     // Check state
1861     switch (mState) {
1862         case INITIALIZED:
1863         case CONFIGURED:
1864         case STARTED:
1865             /* valid state */
1866             break;
1867         default:
1868             LOGE("Invalid state %d", mState);
1869             pthread_mutex_unlock(&mMutex);
1870             return -ENODEV;
1871     }
1872 
1873     /* Check whether we have video stream */
1874     m_bIs4KVideo = false;
1875     m_bIsVideo = false;
1876     m_bEisSupportedSize = false;
1877     m_bTnrEnabled = false;
1878     m_bVideoHdrEnabled = false;
1879     bool isZsl = false;
1880     bool depthPresent = false;
1881     bool isPreview = false;
1882     uint32_t videoWidth = 0U;
1883     uint32_t videoHeight = 0U;
1884     size_t rawStreamCnt = 0;
1885     size_t stallStreamCnt = 0;
1886     size_t processedStreamCnt = 0;
1887     // Number of streams on ISP encoder path
1888     size_t numStreamsOnEncoder = 0;
1889     size_t numYuv888OnEncoder = 0;
1890     bool bYuv888OverrideJpeg = false;
1891     cam_dimension_t largeYuv888Size = {0, 0};
1892     cam_dimension_t maxViewfinderSize = {0, 0};
1893     bool bJpegExceeds4K = false;
1894     bool bJpegOnEncoder = false;
1895     bool bUseCommonFeatureMask = false;
1896     cam_feature_mask_t commonFeatureMask = 0;
1897     bool bSmallJpegSize = false;
1898     uint32_t width_ratio;
1899     uint32_t height_ratio;
1900     maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1901     camera3_stream_t *inputStream = NULL;
1902     bool isJpeg = false;
1903     cam_dimension_t jpegSize = {0, 0};
1904     cam_dimension_t previewSize = {0, 0};
1905     size_t pdStatCount = 0;
1906 
1907     cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1908 
1909     /*EIS configuration*/
1910     uint8_t eis_prop_set;
1911     uint32_t maxEisWidth = 0;
1912     uint32_t maxEisHeight = 0;
1913 
1914     // Initialize all instant AEC related variables
1915     mInstantAEC = false;
1916     mResetInstantAEC = false;
1917     mInstantAECSettledFrameNumber = 0;
1918     mAecSkipDisplayFrameBound = 0;
1919     mInstantAecFrameIdxCount = 0;
1920     mCurrFeatureState = 0;
1921     mStreamConfig = true;
1922 
1923     m_bAVTimerEnabled = false;
1924 
1925     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1926 
1927     size_t count = IS_TYPE_MAX;
1928     count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1929     for (size_t i = 0; i < count; i++) {
1930         if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
1931             (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1932             m_bEisSupported = true;
1933             break;
1934         }
1935     }
1936 
1937     if (m_bEisSupported) {
1938         maxEisWidth = MAX_EIS_WIDTH;
1939         maxEisHeight = MAX_EIS_HEIGHT;
1940     }
1941 
1942     /* EIS setprop control */
1943     char eis_prop[PROPERTY_VALUE_MAX];
1944     memset(eis_prop, 0, sizeof(eis_prop));
1945     property_get("persist.camera.eis.enable", eis_prop, "1");
1946     eis_prop_set = (uint8_t)atoi(eis_prop);
1947 
1948     m_bEisEnable = eis_prop_set && m_bEisSupported &&
1949             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1950 
1951     LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1952             m_bEisEnable, eis_prop_set, m_bEisSupported);
1953 
1954     /* stream configurations */
1955     for (size_t i = 0; i < streamList->num_streams; i++) {
1956         camera3_stream_t *newStream = streamList->streams[i];
1957         LOGI("stream[%d] type = %d, format = %d, width = %d, "
1958                 "height = %d, rotation = %d, usage = 0x%x",
1959                  i, newStream->stream_type, newStream->format,
1960                 newStream->width, newStream->height, newStream->rotation,
1961                 newStream->usage);
1962         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1963                 newStream->stream_type == CAMERA3_STREAM_INPUT){
1964             isZsl = true;
1965         }
1966         if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1967                 IS_USAGE_PREVIEW(newStream->usage)) {
1968             isPreview = true;
1969         }
1970 
1971         if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1972             inputStream = newStream;
1973         }
1974 
1975         if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1976                 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
1977             isJpeg = true;
1978             jpegSize.width = newStream->width;
1979             jpegSize.height = newStream->height;
1980             if (newStream->width > VIDEO_4K_WIDTH ||
1981                     newStream->height > VIDEO_4K_HEIGHT)
1982                 bJpegExceeds4K = true;
1983         }
1984 
1985         if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1986                 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1987             m_bIsVideo = true;
1988             // In HAL3 we can have multiple different video streams.
1989             // The variables video width and height are used below as
1990             // dimensions of the biggest of them
1991             if (videoWidth < newStream->width ||
1992                 videoHeight < newStream->height) {
1993               videoWidth = newStream->width;
1994               videoHeight = newStream->height;
1995             }
1996             if ((VIDEO_4K_WIDTH <= newStream->width) &&
1997                     (VIDEO_4K_HEIGHT <= newStream->height)) {
1998                 m_bIs4KVideo = true;
1999             }
2000             m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
2001                                   (newStream->height <= maxEisHeight);
2002 
2003         }
2004         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
2005                 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
2006             switch (newStream->format) {
2007             case HAL_PIXEL_FORMAT_BLOB:
2008                 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2009                     depthPresent = true;
2010                     break;
2011                 }
2012                 stallStreamCnt++;
2013                 if (isOnEncoder(maxViewfinderSize, newStream->width,
2014                         newStream->height)) {
2015                     numStreamsOnEncoder++;
2016                     bJpegOnEncoder = true;
2017                 }
2018                 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
2019                         newStream->width);
2020                 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
2021                         newStream->height);;
2022                 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2023                         "FATAL: max_downscale_factor cannot be zero and so assert");
2024                 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2025                     (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2026                     LOGH("Setting small jpeg size flag to true");
2027                     bSmallJpegSize = true;
2028                 }
2029                 break;
2030             case HAL_PIXEL_FORMAT_RAW10:
2031             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2032             case HAL_PIXEL_FORMAT_RAW16:
2033                 rawStreamCnt++;
2034                 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2035                         (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2036                     pdStatCount++;
2037                 }
2038                 break;
2039             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2040                 processedStreamCnt++;
2041                 if (isOnEncoder(maxViewfinderSize, newStream->width,
2042                         newStream->height)) {
2043                     if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2044                             !IS_USAGE_ZSL(newStream->usage)) {
2045                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2046                     }
2047                     numStreamsOnEncoder++;
2048                 }
2049                 break;
2050             case HAL_PIXEL_FORMAT_YCbCr_420_888:
2051                 processedStreamCnt++;
2052                 if (isOnEncoder(maxViewfinderSize, newStream->width,
2053                         newStream->height)) {
2054                     // If Yuv888 size is not greater than 4K, set feature mask
2055                     // to SUPERSET so that it support concurrent request on
2056                     // YUV and JPEG.
2057                     if (newStream->width <= VIDEO_4K_WIDTH &&
2058                             newStream->height <= VIDEO_4K_HEIGHT) {
2059                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2060                     }
2061                     numStreamsOnEncoder++;
2062                     numYuv888OnEncoder++;
2063                     largeYuv888Size.width = newStream->width;
2064                     largeYuv888Size.height = newStream->height;
2065                 }
2066                 break;
2067             default:
2068                 processedStreamCnt++;
2069                 if (isOnEncoder(maxViewfinderSize, newStream->width,
2070                         newStream->height)) {
2071                     commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2072                     numStreamsOnEncoder++;
2073                 }
2074                 break;
2075             }
2076 
2077         }
2078     }
2079 
2080     if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2081             gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2082             !m_bIsVideo) {
2083         m_bEisEnable = false;
2084     }
2085 
2086     if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2087         pthread_mutex_unlock(&mMutex);
2088         return -EINVAL;
2089     }
2090 
2091     uint8_t forceEnableTnr = 0;
2092     char tnr_prop[PROPERTY_VALUE_MAX];
2093     memset(tnr_prop, 0, sizeof(tnr_prop));
2094     property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2095     forceEnableTnr = (uint8_t)atoi(tnr_prop);
2096 
2097     /* Logic to enable/disable TNR based on specific config size/etc.*/
2098     if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
2099             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2100         m_bTnrEnabled = true;
2101     else if (forceEnableTnr)
2102         m_bTnrEnabled = true;
2103 
2104     char videoHdrProp[PROPERTY_VALUE_MAX];
2105     memset(videoHdrProp, 0, sizeof(videoHdrProp));
2106     property_get("persist.camera.hdr.video", videoHdrProp, "0");
2107     uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2108 
2109     if (hdr_mode_prop == 1 && m_bIsVideo &&
2110             mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2111         m_bVideoHdrEnabled = true;
2112     else
2113         m_bVideoHdrEnabled = false;
2114 
2115 
2116     /* Check if num_streams is sane */
2117     if (stallStreamCnt > MAX_STALLING_STREAMS ||
2118             rawStreamCnt > MAX_RAW_STREAMS ||
2119             processedStreamCnt > MAX_PROCESSED_STREAMS) {
2120         LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2121                  stallStreamCnt, rawStreamCnt, processedStreamCnt);
2122         pthread_mutex_unlock(&mMutex);
2123         return -EINVAL;
2124     }
2125     /* Check whether we have zsl stream or 4k video case */
2126     if (isZsl && m_bIs4KVideo) {
2127         LOGE("Currently invalid configuration ZSL & 4K Video!");
2128         pthread_mutex_unlock(&mMutex);
2129         return -EINVAL;
2130     }
2131     /* Check if stream sizes are sane */
2132     if (numStreamsOnEncoder > 2) {
2133         LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2134         pthread_mutex_unlock(&mMutex);
2135         return -EINVAL;
2136     } else if (1 < numStreamsOnEncoder){
2137         bUseCommonFeatureMask = true;
2138         LOGH("Multiple streams above max viewfinder size, common mask needed");
2139     }
2140 
2141     /* Check if BLOB size is greater than 4k in 4k recording case */
2142     if (m_bIs4KVideo && bJpegExceeds4K) {
2143         LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2144         pthread_mutex_unlock(&mMutex);
2145         return -EINVAL;
2146     }
2147 
2148     if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2149             depthPresent) {
2150         LOGE("HAL doesn't support depth streams in HFR mode!");
2151         pthread_mutex_unlock(&mMutex);
2152         return -EINVAL;
2153     }
2154 
2155     // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2156     // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2157     // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2158     // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2159     // configurations:
2160     //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2161     //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2162     //    (These two configurations will not have CAC2 enabled even in HQ modes.)
2163     if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2164         ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2165                 __func__);
2166         pthread_mutex_unlock(&mMutex);
2167         return -EINVAL;
2168     }
2169 
2170     // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2171     // the YUV stream's size is greater or equal to the JPEG size, set common
2172     // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2173     if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2174             jpegSize.width, jpegSize.height) &&
2175             largeYuv888Size.width > jpegSize.width &&
2176             largeYuv888Size.height > jpegSize.height) {
2177         bYuv888OverrideJpeg = true;
2178     } else if (!isJpeg && numStreamsOnEncoder > 1) {
2179         commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2180     }
2181 
2182     LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2183             maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2184             commonFeatureMask);
2185     LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2186             numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2187 
2188     rc = validateStreamDimensions(streamList);
2189     if (rc == NO_ERROR) {
2190         rc = validateStreamRotations(streamList);
2191     }
2192     if (rc != NO_ERROR) {
2193         LOGE("Invalid stream configuration requested!");
2194         pthread_mutex_unlock(&mMutex);
2195         return rc;
2196     }
2197 
2198     if (1 < pdStatCount) {
2199         LOGE("HAL doesn't support multiple PD streams");
2200         pthread_mutex_unlock(&mMutex);
2201         return -EINVAL;
2202     }
2203 
2204     if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2205             (1 == pdStatCount)) {
2206         LOGE("HAL doesn't support PD streams in HFR mode!");
2207         pthread_mutex_unlock(&mMutex);
2208         return -EINVAL;
2209     }
2210 
2211     camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2212     for (size_t i = 0; i < streamList->num_streams; i++) {
2213         camera3_stream_t *newStream = streamList->streams[i];
2214         LOGH("newStream type = %d, stream format = %d "
2215                 "stream size : %d x %d, stream rotation = %d",
2216                  newStream->stream_type, newStream->format,
2217                 newStream->width, newStream->height, newStream->rotation);
2218         //if the stream already exists in mStreamInfo, validate and reuse its entry
2219         bool stream_exists = false;
2220         for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2221                 it != mStreamInfo.end(); it++) {
2222             if ((*it)->stream == newStream) {
2223                 QCamera3ProcessingChannel *channel =
2224                     (QCamera3ProcessingChannel*)(*it)->stream->priv;
2225                 stream_exists = true;
2226                 if (channel)
2227                     delete channel;
2228                 (*it)->status = VALID;
2229                 (*it)->stream->priv = NULL;
2230                 (*it)->channel = NULL;
2231             }
2232         }
2233         if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2234             //new stream
2235             stream_info_t* stream_info;
2236             stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2237             if (!stream_info) {
2238                LOGE("Could not allocate stream info");
2239                rc = -ENOMEM;
2240                pthread_mutex_unlock(&mMutex);
2241                return rc;
2242             }
2243             stream_info->stream = newStream;
2244             stream_info->status = VALID;
2245             stream_info->channel = NULL;
2246             stream_info->id = i; // ID will be re-assigned in cleanAndSortStreamInfo().
2247             mStreamInfo.push_back(stream_info);
2248         }
2249         /* Covers Opaque ZSL and API1 F/W ZSL */
2250         if (IS_USAGE_ZSL(newStream->usage)
2251                 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2252             if (zslStream != NULL) {
2253                 LOGE("Multiple input/reprocess streams requested!");
2254                 pthread_mutex_unlock(&mMutex);
2255                 return BAD_VALUE;
2256             }
2257             zslStream = newStream;
2258         }
2259         /* Covers YUV reprocess */
2260         if (inputStream != NULL) {
2261             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2262                     && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2263                     && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2264                     && inputStream->width == newStream->width
2265                     && inputStream->height == newStream->height) {
2266                 if (zslStream != NULL) {
2267                     /* This scenario indicates multiple YUV streams with same size
2268                      * as input stream have been requested, since zsl stream handle
2269                      * is solely used for overriding the size of streams
2270                      * which share h/w streams we will just make a guess here as to
2271                      * which of the stream is a ZSL stream, this will be refactored
2272                      * once we make generic logic for streams sharing encoder output
2273                      */
2274                     LOGH("Warning, Multiple ip/reprocess streams requested!");
2275                 }
2276                 zslStream = newStream;
2277             }
2278         }
2279     }
2280 
2281     /* If a zsl stream is set, we know that we have configured at least one input or
2282        bidirectional stream */
2283     if (NULL != zslStream) {
2284         mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2285         mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2286         mInputStreamInfo.format = zslStream->format;
2287         mInputStreamInfo.usage = zslStream->usage;
2288         LOGD("Input stream configured! %d x %d, format %d, usage %d",
2289                  mInputStreamInfo.dim.width,
2290                 mInputStreamInfo.dim.height,
2291                 mInputStreamInfo.format, mInputStreamInfo.usage);
2292     }
2293 
2294     cleanAndSortStreamInfo();
2295     if (mMetadataChannel) {
2296         delete mMetadataChannel;
2297         mMetadataChannel = NULL;
2298     }
2299     if (mSupportChannel) {
2300         delete mSupportChannel;
2301         mSupportChannel = NULL;
2302     }
2303 
2304     if (mAnalysisChannel) {
2305         delete mAnalysisChannel;
2306         mAnalysisChannel = NULL;
2307     }
2308 
2309     if (mDummyBatchChannel) {
2310         delete mDummyBatchChannel;
2311         mDummyBatchChannel = NULL;
2312     }
2313 
2314     if (mDepthChannel) {
2315         mDepthChannel = NULL;
2316     }
2317     mDepthCloudMode = CAM_PD_DATA_SKIP;
2318 
2319     mShutterDispatcher.clear();
2320     mOutputBufferDispatcher.clear();
2321 
2322     char is_type_value[PROPERTY_VALUE_MAX];
2323     property_get("persist.camera.is_type", is_type_value, "4");
2324     m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2325 
2326     char property_value[PROPERTY_VALUE_MAX];
2327     property_get("persist.camera.gzoom.at", property_value, "0");
2328     int goog_zoom_at = atoi(property_value);
2329     bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2330         gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2331     bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2332         gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2333 
2334     property_get("persist.camera.gzoom.4k", property_value, "0");
2335     bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2336 
2337     //Create metadata channel and initialize it
2338     cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2339     setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2340             gCamCapability[mCameraId]->color_arrangement);
2341     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2342                     mChannelHandle, mCameraHandle->ops, captureResultCb,
2343                     setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
2344     if (mMetadataChannel == NULL) {
2345         LOGE("failed to allocate metadata channel");
2346         rc = -ENOMEM;
2347         pthread_mutex_unlock(&mMutex);
2348         return rc;
2349     }
2350     mMetadataChannel->enableDepthData(depthPresent);
2351     rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2352     if (rc < 0) {
2353         LOGE("metadata channel initialization failed");
2354         delete mMetadataChannel;
2355         mMetadataChannel = NULL;
2356         pthread_mutex_unlock(&mMutex);
2357         return rc;
2358     }
2359 
2360     cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2361     bool isRawStreamRequested = false;
2362     bool onlyRaw = true;
2363     // Keep track of preview/video streams indices.
2364     // There could be more than one preview streams, but only one video stream.
2365     int32_t video_stream_idx = -1;
2366     int32_t preview_stream_idx[streamList->num_streams];
2367     size_t preview_stream_cnt = 0;
2368     bool previewTnr[streamList->num_streams];
2369     memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2370     bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2371     // Loop through once to determine preview TNR conditions before creating channels.
2372     for (size_t i = 0; i < streamList->num_streams; i++) {
2373         camera3_stream_t *newStream = streamList->streams[i];
2374         uint32_t stream_usage = newStream->usage;
2375         if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2376                 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2377             if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2378                 video_stream_idx = (int32_t)i;
2379             else
2380                 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2381         }
2382     }
2383     // By default, preview stream TNR is disabled.
2384     // Enable TNR to the preview stream if all conditions below are satisfied:
2385     //  1. preview resolution == video resolution.
2386     //  2. video stream TNR is enabled.
2387     //  3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2388     for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2389         camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2390         camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2391         if (m_bTnrEnabled && m_bTnrVideo &&
2392                 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2393                 video_stream->width == preview_stream->width &&
2394                 video_stream->height == preview_stream->height) {
2395             previewTnr[preview_stream_idx[i]] = true;
2396         }
2397     }
2398 
2399     memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2400     /* Allocate channel objects for the requested streams */
2401     for (size_t i = 0; i < streamList->num_streams; i++) {
2402 
2403         camera3_stream_t *newStream = streamList->streams[i];
2404         uint32_t stream_usage = newStream->usage;
2405         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2406         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2407         struct camera_info *p_info = NULL;
2408         pthread_mutex_lock(&gCamLock);
2409         p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2410         pthread_mutex_unlock(&gCamLock);
2411         if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2412                 || IS_USAGE_ZSL(newStream->usage)) &&
2413             newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
2414             onlyRaw = false; // There is non-raw stream - bypass flag if set
2415             mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2416             if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2417                 if (bUseCommonFeatureMask)
2418                     zsl_ppmask = commonFeatureMask;
2419                 else
2420                     zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2421             } else {
2422                 if (numStreamsOnEncoder > 0)
2423                     zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2424                 else
2425                     zsl_ppmask = CAM_QCOM_FEATURE_NONE;
2426             }
2427             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
2428         } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
2429             onlyRaw = false; // There is non-raw stream - bypass flag if set
2430                 LOGH("Input stream configured, reprocess config");
2431         } else {
2432             //for non zsl streams find out the format
2433             switch (newStream->format) {
2434             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2435             {
2436                 onlyRaw = false; // There is non-raw stream - bypass flag if set
2437                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2438                         CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2439                 /* add additional features to pp feature mask */
2440                 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2441                         mStreamConfigInfo.num_streams);
2442 
2443                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2444                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2445                                 CAM_STREAM_TYPE_VIDEO;
2446                     if (m_bTnrEnabled && m_bTnrVideo) {
2447                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2448                             CAM_QCOM_FEATURE_CPP_TNR;
2449                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2450                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2451                                 ~CAM_QCOM_FEATURE_CDS;
2452                     }
2453                     if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2454                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2455                             CAM_QTI_FEATURE_PPEISCORE;
2456                     }
2457                     if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2458                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2459                             CAM_QCOM_FEATURE_GOOG_ZOOM;
2460                     }
2461                 } else {
2462                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2463                             CAM_STREAM_TYPE_PREVIEW;
2464                     if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
2465                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2466                                 CAM_QCOM_FEATURE_CPP_TNR;
2467                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2468                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2469                                 ~CAM_QCOM_FEATURE_CDS;
2470                     }
2471                     if(!m_bSwTnrPreview) {
2472                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2473                                 ~CAM_QTI_FEATURE_SW_TNR;
2474                     }
2475                     if (is_goog_zoom_preview_enabled) {
2476                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2477                             CAM_QCOM_FEATURE_GOOG_ZOOM;
2478                     }
2479                     padding_info.width_padding = mSurfaceStridePadding;
2480                     padding_info.height_padding = CAM_PAD_TO_2;
2481                     previewSize.width = (int32_t)newStream->width;
2482                     previewSize.height = (int32_t)newStream->height;
2483                 }
2484                 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2485                         (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2486                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2487                             newStream->height;
2488                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2489                             newStream->width;
2490                 }
2491             }
2492             break;
2493             case HAL_PIXEL_FORMAT_YCbCr_420_888:
2494                 onlyRaw = false; // There is non-raw stream - bypass flag if set
2495                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2496                 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2497                     if (bUseCommonFeatureMask)
2498                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2499                                 commonFeatureMask;
2500                     else
2501                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2502                                 CAM_QCOM_FEATURE_NONE;
2503                 } else {
2504                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2505                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2506                 }
2507             break;
2508             case HAL_PIXEL_FORMAT_BLOB:
2509                 onlyRaw = false; // There is non-raw stream - bypass flag if set
2510                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2511                 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2512                 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2513                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2514                              CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2515                      /* Remove rotation if it is not supported
2516                         for 4K LiveVideo snapshot case (online processing) */
2517                      if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2518                                 CAM_QCOM_FEATURE_ROTATION)) {
2519                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2520                                  &= ~CAM_QCOM_FEATURE_ROTATION;
2521                      }
2522                 } else {
2523                     if (bUseCommonFeatureMask &&
2524                             isOnEncoder(maxViewfinderSize, newStream->width,
2525                             newStream->height)) {
2526                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2527                     } else {
2528                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2529                     }
2530                 }
2531                 if (isZsl) {
2532                     if (zslStream) {
2533                         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2534                                 (int32_t)zslStream->width;
2535                         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2536                                 (int32_t)zslStream->height;
2537                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2538                                 zsl_ppmask;
2539                     } else {
2540                         LOGE("Error, No ZSL stream identified");
2541                         pthread_mutex_unlock(&mMutex);
2542                         return -EINVAL;
2543                     }
2544                 } else if (m_bIs4KVideo) {
2545                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2546                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2547                 } else if (bYuv888OverrideJpeg) {
2548                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2549                             (int32_t)largeYuv888Size.width;
2550                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2551                             (int32_t)largeYuv888Size.height;
2552                 }
2553                 break;
2554             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2555             case HAL_PIXEL_FORMAT_RAW16:
2556             case HAL_PIXEL_FORMAT_RAW10:
2557                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2558                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2559                 isRawStreamRequested = true;
2560                 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2561                         (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2562                     mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2563                             gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2564                     mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2565                             gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2566                     mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2567                             gCamCapability[mCameraId]->dt[mPDIndex];
2568                     mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2569                             gCamCapability[mCameraId]->vc[mPDIndex];
2570                 }
2571                 break;
2572             default:
2573                 onlyRaw = false; // There is non-raw stream - bypass flag if set
2574                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2575                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2576                 break;
2577             }
2578         }
2579 
2580         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2581                 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2582                 gCamCapability[mCameraId]->color_arrangement);
2583 
2584         if (newStream->priv == NULL) {
2585             //New stream, construct channel
2586             switch (newStream->stream_type) {
2587             case CAMERA3_STREAM_INPUT:
2588                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2589                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2590                 break;
2591             case CAMERA3_STREAM_BIDIRECTIONAL:
2592                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2593                     GRALLOC_USAGE_HW_CAMERA_WRITE;
2594                 break;
2595             case CAMERA3_STREAM_OUTPUT:
2596                 /* For video encoding stream, set read/write rarely
2597                  * flag so that they may be set to un-cached */
2598                 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2599                     newStream->usage |=
2600                          (GRALLOC_USAGE_SW_READ_RARELY |
2601                          GRALLOC_USAGE_SW_WRITE_RARELY |
2602                          GRALLOC_USAGE_HW_CAMERA_WRITE);
2603                 else if (IS_USAGE_ZSL(newStream->usage))
2604                 {
2605                     LOGD("ZSL usage flag skipping");
2606                 }
2607                 else if (newStream == zslStream
2608                         || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2609                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2610                 } else
2611                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2612                 break;
2613             default:
2614                 LOGE("Invalid stream_type %d", newStream->stream_type);
2615                 break;
2616             }
2617 
2618             bool forcePreviewUBWC = true;
2619             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2620                     newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2621                 QCamera3ProcessingChannel *channel = NULL;
2622                 switch (newStream->format) {
2623                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2624                     if ((newStream->usage &
2625                             private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2626                             (streamList->operation_mode ==
2627                             CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2628                     ) {
2629                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2630                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
2631                                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2632                                 this,
2633                                 newStream,
2634                                 (cam_stream_type_t)
2635                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2636                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2637                                 mMetadataChannel,
2638                                 0); //heap buffers are not required for HFR video channel
2639                         if (channel == NULL) {
2640                             LOGE("allocation of channel failed");
2641                             pthread_mutex_unlock(&mMutex);
2642                             return -ENOMEM;
2643                         }
2644                         //channel->getNumBuffers() will return 0 here so use
2645                         //MAX_INFLIGHT_HFR_REQUESTS
2646                         newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2647                         newStream->priv = channel;
2648                         LOGI("num video buffers in HFR mode: %d",
2649                                  MAX_INFLIGHT_HFR_REQUESTS);
2650                     } else {
2651                         /* Copy stream contents in HFR preview only case to create
2652                          * dummy batch channel so that sensor streaming is in
2653                          * HFR mode */
2654                         if (!m_bIsVideo && (streamList->operation_mode ==
2655                                 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2656                             mDummyBatchStream = *newStream;
2657                         }
2658                         int bufferCount = MAX_INFLIGHT_REQUESTS;
2659                         if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2660                                 CAM_STREAM_TYPE_VIDEO) {
2661                             if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2662                                 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2663                                 bufferCount = m_bIs4KVideo ?
2664                                     MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2665                             }
2666 
2667                         }
2668                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2669                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
2670                                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2671                                 this,
2672                                 newStream,
2673                                 (cam_stream_type_t)
2674                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2675                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2676                                 mMetadataChannel,
2677                                 bufferCount);
2678                         if (channel == NULL) {
2679                             LOGE("allocation of channel failed");
2680                             pthread_mutex_unlock(&mMutex);
2681                             return -ENOMEM;
2682                         }
2683                         /* disable UBWC for preview, though supported,
2684                          * to take advantage of CPP duplication */
2685                         if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
2686                                 (previewSize.width == (int32_t)videoWidth)&&
2687                                 (previewSize.height == (int32_t)videoHeight)){
2688                             forcePreviewUBWC = false;
2689                         }
2690                         channel->setUBWCEnabled(forcePreviewUBWC);
2691                          /* When goog_zoom is linked to the preview or video stream,
2692                           * disable ubwc to the linked stream */
2693                         if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2694                                 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2695                             channel->setUBWCEnabled(false);
2696                         }
2697                         newStream->max_buffers = channel->getNumBuffers();
2698                         newStream->priv = channel;
2699                     }
2700                     break;
2701                 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2702                     channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2703                             mChannelHandle,
2704                             mCameraHandle->ops, captureResultCb,
2705                             setBufferErrorStatus, &padding_info,
2706                             this,
2707                             newStream,
2708                             (cam_stream_type_t)
2709                                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2710                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2711                             mMetadataChannel);
2712                     if (channel == NULL) {
2713                         LOGE("allocation of YUV channel failed");
2714                         pthread_mutex_unlock(&mMutex);
2715                         return -ENOMEM;
2716                     }
2717                     newStream->max_buffers = channel->getNumBuffers();
2718                     newStream->priv = channel;
2719                     break;
2720                 }
2721                 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2722                 case HAL_PIXEL_FORMAT_RAW16:
2723                 case HAL_PIXEL_FORMAT_RAW10: {
2724                     bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2725                             (HAL_DATASPACE_DEPTH != newStream->data_space))
2726                             ? true : false;
2727                     mRawChannel = new QCamera3RawChannel(
2728                             mCameraHandle->camera_handle, mChannelHandle,
2729                             mCameraHandle->ops, captureResultCb,
2730                             setBufferErrorStatus, &padding_info,
2731                             this, newStream,
2732                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2733                             mMetadataChannel, isRAW16);
2734                     if (mRawChannel == NULL) {
2735                         LOGE("allocation of raw channel failed");
2736                         pthread_mutex_unlock(&mMutex);
2737                         return -ENOMEM;
2738                     }
2739                     newStream->max_buffers = mRawChannel->getNumBuffers();
2740                     newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2741                     break;
2742                 }
2743                 case HAL_PIXEL_FORMAT_BLOB:
2744                     if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2745                         mDepthChannel = new QCamera3DepthChannel(
2746                                 mCameraHandle->camera_handle, mChannelHandle,
2747                                 mCameraHandle->ops, NULL, NULL, &padding_info,
2748                                 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2749                                 mMetadataChannel);
2750                         if (NULL == mDepthChannel) {
2751                             LOGE("Allocation of depth channel failed");
2752                             pthread_mutex_unlock(&mMutex);
2753                             return NO_MEMORY;
2754                         }
2755                         newStream->priv = mDepthChannel;
2756                         newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2757                     } else {
2758                         // Max live snapshot inflight buffer is 1. This is to mitigate
2759                         // frame drop issues for video snapshot. The more buffers being
2760                         // allocated, the more frame drops there are.
2761                         mPictureChannel = new QCamera3PicChannel(
2762                                 mCameraHandle->camera_handle, mChannelHandle,
2763                                 mCameraHandle->ops, captureResultCb,
2764                                 setBufferErrorStatus, &padding_info, this, newStream,
2765                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2766                                 m_bIs4KVideo, isZsl, mMetadataChannel,
2767                                 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2768                         if (mPictureChannel == NULL) {
2769                             LOGE("allocation of channel failed");
2770                             pthread_mutex_unlock(&mMutex);
2771                             return -ENOMEM;
2772                         }
2773                         newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2774                         newStream->max_buffers = mPictureChannel->getNumBuffers();
2775                         mPictureChannel->overrideYuvSize(
2776                                 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2777                                 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2778                     }
2779                     break;
2780 
2781                 default:
2782                     LOGE("not a supported format 0x%x", newStream->format);
2783                     pthread_mutex_unlock(&mMutex);
2784                     return -EINVAL;
2785                 }
2786             } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2787                 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2788             } else {
2789                 LOGE("Error, Unknown stream type");
2790                 pthread_mutex_unlock(&mMutex);
2791                 return -EINVAL;
2792             }
2793 
2794             QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2795             if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2796                 // Here we only care whether it's EIS3 or not
2797                 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2798                 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2799                         mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2800                     isType = IS_TYPE_NONE;
2801                 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
2802                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2803                         newStream->width, newStream->height, forcePreviewUBWC, isType);
2804                 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2805                     newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2806                 }
2807             }
2808 
2809             for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2810                     it != mStreamInfo.end(); it++) {
2811                 if ((*it)->stream == newStream) {
2812                     (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2813                     break;
2814                 }
2815             }
2816         } else {
2817             // Channel already exists for this stream
2818             // Do nothing for now
2819         }
2820         padding_info = gCamCapability[mCameraId]->padding_info;
2821 
2822         /* Do not add entries for input&depth stream in metastream info
2823          * since there is no real stream associated with it
2824          */
2825         if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
2826                 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2827                         (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
2828             mStreamConfigInfo.num_streams++;
2829         }
2830     }
2831 
2832     // Let buffer dispatcher know the configured streams.
2833     mOutputBufferDispatcher.configureStreams(streamList);
2834 
2835     if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2836         onlyRaw = false;
2837     }
2838 
2839     // Create analysis stream all the time, even when h/w support is not available
2840     if (!onlyRaw) {
2841         cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2842         cam_analysis_info_t analysisInfo;
2843         int32_t ret = NO_ERROR;
2844         ret = mCommon.getAnalysisInfo(
2845                 FALSE,
2846                 analysisFeatureMask,
2847                 &analysisInfo);
2848         if (ret == NO_ERROR) {
2849             cam_color_filter_arrangement_t analysis_color_arrangement =
2850                     (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2851                     CAM_FILTER_ARRANGEMENT_Y :
2852                     gCamCapability[mCameraId]->color_arrangement);
2853             setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2854                                                analysis_color_arrangement);
2855             cam_dimension_t analysisDim;
2856             analysisDim = mCommon.getMatchingDimension(previewSize,
2857                     analysisInfo.analysis_recommended_res);
2858 
2859             mAnalysisChannel = new QCamera3SupportChannel(
2860                     mCameraHandle->camera_handle,
2861                     mChannelHandle,
2862                     mCameraHandle->ops,
2863                     &analysisInfo.analysis_padding_info,
2864                     analysisFeatureMask,
2865                     CAM_STREAM_TYPE_ANALYSIS,
2866                     &analysisDim,
2867                     (analysisInfo.analysis_format
2868                     == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2869                     : CAM_FORMAT_YUV_420_NV21),
2870                     analysisInfo.hw_analysis_supported,
2871                     gCamCapability[mCameraId]->color_arrangement,
2872                     this,
2873                     0); // force buffer count to 0
2874         } else {
2875             LOGW("getAnalysisInfo failed, ret = %d", ret);
2876         }
2877         if (!mAnalysisChannel) {
2878             LOGW("Analysis channel cannot be created");
2879         }
2880     }
2881 
2882     //RAW DUMP channel
2883     if (mEnableRawDump && isRawStreamRequested == false){
2884         cam_dimension_t rawDumpSize;
2885         rawDumpSize = getMaxRawSize(mCameraId);
2886         cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2887         setPAAFSupport(rawDumpFeatureMask,
2888                 CAM_STREAM_TYPE_RAW,
2889                 gCamCapability[mCameraId]->color_arrangement);
2890         mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2891                                   mChannelHandle,
2892                                   mCameraHandle->ops,
2893                                   rawDumpSize,
2894                                   &padding_info,
2895                                   this, rawDumpFeatureMask);
2896         if (!mRawDumpChannel) {
2897             LOGE("Raw Dump channel cannot be created");
2898             pthread_mutex_unlock(&mMutex);
2899             return -ENOMEM;
2900         }
2901     }
2902 
2903     if (mAnalysisChannel) {
2904         cam_analysis_info_t analysisInfo;
2905         memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2906         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2907                 CAM_STREAM_TYPE_ANALYSIS;
2908         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2909                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2910         rc = mCommon.getAnalysisInfo(FALSE,
2911                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2912                 &analysisInfo);
2913         if (rc != NO_ERROR) {
2914             LOGE("getAnalysisInfo failed, ret = %d", rc);
2915             pthread_mutex_unlock(&mMutex);
2916             return rc;
2917         }
2918         cam_color_filter_arrangement_t analysis_color_arrangement =
2919                 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2920                 CAM_FILTER_ARRANGEMENT_Y :
2921                 gCamCapability[mCameraId]->color_arrangement);
2922         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2923                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2924                 analysis_color_arrangement);
2925 
2926         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2927                 mCommon.getMatchingDimension(previewSize,
2928                 analysisInfo.analysis_recommended_res);
2929         mStreamConfigInfo.num_streams++;
2930     }
2931 
2932     if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2933         cam_analysis_info_t supportInfo;
2934         memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2935         cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2936         setPAAFSupport(callbackFeatureMask,
2937                 CAM_STREAM_TYPE_CALLBACK,
2938                 gCamCapability[mCameraId]->color_arrangement);
2939         int32_t ret = NO_ERROR;
2940         ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
2941         if (ret != NO_ERROR) {
2942             /* Ignore the error for Mono camera
2943              * because the PAAF bit mask is only set
2944              * for CAM_STREAM_TYPE_ANALYSIS stream type
2945              */
2946             if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2947                 LOGW("getAnalysisInfo failed, ret = %d", ret);
2948             }
2949         }
2950         mSupportChannel = new QCamera3SupportChannel(
2951                 mCameraHandle->camera_handle,
2952                 mChannelHandle,
2953                 mCameraHandle->ops,
2954                 &gCamCapability[mCameraId]->padding_info,
2955                 callbackFeatureMask,
2956                 CAM_STREAM_TYPE_CALLBACK,
2957                 &QCamera3SupportChannel::kDim,
2958                 CAM_FORMAT_YUV_420_NV21,
2959                 supportInfo.hw_analysis_supported,
2960                 gCamCapability[mCameraId]->color_arrangement,
2961                 this, 0);
2962         if (!mSupportChannel) {
2963             LOGE("dummy channel cannot be created");
2964             pthread_mutex_unlock(&mMutex);
2965             return -ENOMEM;
2966         }
2967     }
2968 
2969     if (mSupportChannel) {
2970         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2971                 QCamera3SupportChannel::kDim;
2972         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2973                 CAM_STREAM_TYPE_CALLBACK;
2974         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2975                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2976         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2977                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2978                 gCamCapability[mCameraId]->color_arrangement);
2979         mStreamConfigInfo.num_streams++;
2980     }
2981 
2982     if (mRawDumpChannel) {
2983         cam_dimension_t rawSize;
2984         rawSize = getMaxRawSize(mCameraId);
2985         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2986                 rawSize;
2987         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2988                 CAM_STREAM_TYPE_RAW;
2989         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2990                 CAM_QCOM_FEATURE_NONE;
2991         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2992                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2993                 gCamCapability[mCameraId]->color_arrangement);
2994         mStreamConfigInfo.num_streams++;
2995     }
2996 
2997     if (mHdrPlusRawSrcChannel) {
2998         cam_dimension_t rawSize;
2999         rawSize = getMaxRawSize(mCameraId);
3000         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
3001         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
3002         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
3003         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3004                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3005                 gCamCapability[mCameraId]->color_arrangement);
3006         mStreamConfigInfo.num_streams++;
3007     }
3008 
3009     /* In HFR mode, if video stream is not added, create a dummy channel so that
3010      * ISP can create a batch mode even for preview only case. This channel is
3011      * never 'start'ed (no stream-on), it is only 'initialized'  */
3012     if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
3013             !m_bIsVideo) {
3014         cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3015         setPAAFSupport(dummyFeatureMask,
3016                 CAM_STREAM_TYPE_VIDEO,
3017                 gCamCapability[mCameraId]->color_arrangement);
3018         mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
3019                 mChannelHandle,
3020                 mCameraHandle->ops, captureResultCb,
3021                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
3022                 this,
3023                 &mDummyBatchStream,
3024                 CAM_STREAM_TYPE_VIDEO,
3025                 dummyFeatureMask,
3026                 mMetadataChannel);
3027         if (NULL == mDummyBatchChannel) {
3028             LOGE("creation of mDummyBatchChannel failed."
3029                     "Preview will use non-hfr sensor mode ");
3030         }
3031     }
3032     if (mDummyBatchChannel) {
3033         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3034                 mDummyBatchStream.width;
3035         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3036                 mDummyBatchStream.height;
3037         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3038                 CAM_STREAM_TYPE_VIDEO;
3039         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3040                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3041         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3042                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3043                 gCamCapability[mCameraId]->color_arrangement);
3044         mStreamConfigInfo.num_streams++;
3045     }
3046 
3047     mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3048     mStreamConfigInfo.buffer_info.max_buffers =
3049             m_bIs4KVideo ? 0 :
3050             m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
3051 
3052     /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3053     for (pendingRequestIterator i = mPendingRequestsList.begin();
3054             i != mPendingRequestsList.end();) {
3055         i = erasePendingRequest(i);
3056     }
3057     mPendingFrameDropList.clear();
3058     // Initialize/Reset the pending buffers list
3059     for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3060         req.mPendingBufferList.clear();
3061     }
3062     mPendingBuffersMap.mPendingBuffersInRequest.clear();
3063     mExpectedInflightDuration = 0;
3064     mExpectedFrameDuration = 0;
3065 
3066     mCurJpegMeta.clear();
3067     //Get min frame duration for this streams configuration
3068     deriveMinFrameDuration();
3069 
3070     mFirstPreviewIntentSeen = false;
3071 
3072     // Update state
3073     mState = CONFIGURED;
3074 
3075     mFirstMetadataCallback = true;
3076 
3077     pthread_mutex_unlock(&mMutex);
3078 
3079     return rc;
3080 }
3081 
3082 /*===========================================================================
3083  * FUNCTION   : validateCaptureRequest
3084  *
3085  * DESCRIPTION: validate a capture request from camera service
3086  *
3087  * PARAMETERS :
3088  *   @request : request from framework to process
3089  *
3090  * RETURN     : NO_ERROR if the request is valid; BAD_VALUE otherwise
3091  *
3092  *==========================================================================*/
validateCaptureRequest(camera3_capture_request_t * request,List<InternalRequest> & internallyRequestedStreams)3093 int QCamera3HardwareInterface::validateCaptureRequest(
3094                     camera3_capture_request_t *request,
3095                     List<InternalRequest> &internallyRequestedStreams)
3096 {
3097     ssize_t idx = 0;
3098     const camera3_stream_buffer_t *b;
3099     CameraMetadata meta;
3100 
3101     /* Sanity check the request */
3102     if (request == NULL) {
3103         LOGE("NULL capture request");
3104         return BAD_VALUE;
3105     }
3106 
3107     if ((request->settings == NULL) && (mState == CONFIGURED)) {
3108         /*settings cannot be null for the first request*/
3109         return BAD_VALUE;
3110     }
3111 
3112     uint32_t frameNumber = request->frame_number;
3113     if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3114             && (internallyRequestedStreams.size() == 0)) {
3115         LOGE("Request %d: No output buffers provided!",
3116                 __FUNCTION__, frameNumber);
3117         return BAD_VALUE;
3118     }
3119     if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3120         LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3121                  request->num_output_buffers, MAX_NUM_STREAMS);
3122         return BAD_VALUE;
3123     }
3124     if (request->input_buffer != NULL) {
3125         b = request->input_buffer;
3126         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3127             LOGE("Request %d: Buffer %ld: Status not OK!",
3128                      frameNumber, (long)idx);
3129             return BAD_VALUE;
3130         }
3131         if (b->release_fence != -1) {
3132             LOGE("Request %d: Buffer %ld: Has a release fence!",
3133                      frameNumber, (long)idx);
3134             return BAD_VALUE;
3135         }
3136         if (b->buffer == NULL) {
3137             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3138                      frameNumber, (long)idx);
3139             return BAD_VALUE;
3140         }
3141     }
3142 
3143     // Validate all buffers
3144     b = request->output_buffers;
3145     if (b == NULL) {
3146        return BAD_VALUE;
3147     }
3148     while (idx < (ssize_t)request->num_output_buffers) {
3149         QCamera3ProcessingChannel *channel =
3150                 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3151         if (channel == NULL) {
3152             LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3153                      frameNumber, (long)idx);
3154             return BAD_VALUE;
3155         }
3156         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3157             LOGE("Request %d: Buffer %ld: Status not OK!",
3158                      frameNumber, (long)idx);
3159             return BAD_VALUE;
3160         }
3161         if (b->release_fence != -1) {
3162             LOGE("Request %d: Buffer %ld: Has a release fence!",
3163                      frameNumber, (long)idx);
3164             return BAD_VALUE;
3165         }
3166         if (b->buffer == NULL) {
3167             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3168                      frameNumber, (long)idx);
3169             return BAD_VALUE;
3170         }
3171         if (*(b->buffer) == NULL) {
3172             LOGE("Request %d: Buffer %ld: NULL private handle!",
3173                      frameNumber, (long)idx);
3174             return BAD_VALUE;
3175         }
3176         idx++;
3177         b = request->output_buffers + idx;
3178     }
3179     return NO_ERROR;
3180 }
3181 
3182 /*===========================================================================
3183  * FUNCTION   : deriveMinFrameDuration
3184  *
3185  * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3186  *              on currently configured streams.
3187  *
3188  * PARAMETERS : NONE
3189  *
3190  * RETURN     : NONE
3191  *
3192  *==========================================================================*/
deriveMinFrameDuration()3193 void QCamera3HardwareInterface::deriveMinFrameDuration()
3194 {
3195     int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3196     bool hasRaw = false;
3197 
3198     mMinRawFrameDuration = 0;
3199     mMinJpegFrameDuration = 0;
3200     mMinProcessedFrameDuration = 0;
3201 
3202     maxJpegDim = 0;
3203     maxProcessedDim = 0;
3204     maxRawDim = 0;
3205 
3206     // Figure out maximum jpeg, processed, and raw dimensions
3207     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3208         it != mStreamInfo.end(); it++) {
3209 
3210         // Input stream doesn't have valid stream_type
3211         if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3212             continue;
3213 
3214         int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3215         if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3216             if (dimension > maxJpegDim)
3217                 maxJpegDim = dimension;
3218         } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3219                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3220                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3221             hasRaw = true;
3222             if (dimension > maxRawDim)
3223                 maxRawDim = dimension;
3224         } else {
3225             if (dimension > maxProcessedDim)
3226                 maxProcessedDim = dimension;
3227         }
3228     }
3229 
3230     size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3231             MAX_SIZES_CNT);
3232 
3233     //Assume all jpeg dimensions are in processed dimensions.
3234     if (maxJpegDim > maxProcessedDim)
3235         maxProcessedDim = maxJpegDim;
3236     //Find the smallest raw dimension that is greater or equal to jpeg dimension
3237     if (hasRaw && maxProcessedDim > maxRawDim) {
3238         maxRawDim = INT32_MAX;
3239 
3240         for (size_t i = 0; i < count; i++) {
3241             int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3242                     gCamCapability[mCameraId]->raw_dim[i].height;
3243             if (dimension >= maxProcessedDim && dimension < maxRawDim)
3244                 maxRawDim = dimension;
3245         }
3246     }
3247 
3248     //Find minimum durations for processed, jpeg, and raw
3249     for (size_t i = 0; i < count; i++) {
3250         if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3251                 gCamCapability[mCameraId]->raw_dim[i].height) {
3252             mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3253             break;
3254         }
3255     }
3256     count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3257     for (size_t i = 0; i < count; i++) {
3258         if (maxProcessedDim ==
3259                 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3260                 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3261             mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3262             mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3263             break;
3264         }
3265     }
3266 }
3267 
3268 /*===========================================================================
3269  * FUNCTION   : getMinFrameDuration
3270  *
3271  * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
3272  *              and current request configuration.
3273  *
3274  * PARAMETERS : @request: request sent by the frameworks
3275  *
3276  * RETURN     : min frame duration for a particular request
3277  *
3278  *==========================================================================*/
getMinFrameDuration(const camera3_capture_request_t * request)3279 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3280 {
3281     bool hasJpegStream = false;
3282     bool hasRawStream = false;
3283     for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3284         const camera3_stream_t *stream = request->output_buffers[i].stream;
3285         if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3286             hasJpegStream = true;
3287         else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3288                 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3289                 stream->format == HAL_PIXEL_FORMAT_RAW16)
3290             hasRawStream = true;
3291     }
3292 
3293     if (!hasJpegStream)
3294         return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3295     else
3296         return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3297 }
3298 
3299 /*===========================================================================
3300  * FUNCTION   : handleBuffersDuringFlushLock
3301  *
3302  * DESCRIPTION: Account for buffers returned from back-end during flush
3303  *              This function is executed while mMutex is held by the caller.
3304  *
3305  * PARAMETERS :
3306  *   @buffer: image buffer for the callback
3307  *
3308  * RETURN     :
3309  *==========================================================================*/
handleBuffersDuringFlushLock(camera3_stream_buffer_t * buffer)3310 void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3311 {
3312     bool buffer_found = false;
3313     for (List<PendingBuffersInRequest>::iterator req =
3314             mPendingBuffersMap.mPendingBuffersInRequest.begin();
3315             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3316         for (List<PendingBufferInfo>::iterator i =
3317                 req->mPendingBufferList.begin();
3318                 i != req->mPendingBufferList.end(); i++) {
3319             if (i->buffer == buffer->buffer) {
3320                 mPendingBuffersMap.numPendingBufsAtFlush--;
3321                 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3322                     buffer->buffer, req->frame_number,
3323                     mPendingBuffersMap.numPendingBufsAtFlush);
3324                 buffer_found = true;
3325                 break;
3326             }
3327         }
3328         if (buffer_found) {
3329             break;
3330         }
3331     }
3332     if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3333         //signal the flush()
3334         LOGD("All buffers returned to HAL. Continue flush");
3335         pthread_cond_signal(&mBuffersCond);
3336     }
3337 }
3338 
3339 /*===========================================================================
3340  * FUNCTION   : handleBatchMetadata
3341  *
3342  * DESCRIPTION: Handles metadata buffer callback in batch mode
3343  *
3344  * PARAMETERS : @metadata_buf: metadata buffer
3345  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3346  *                 the meta buf in this method
3347  *
3348  * RETURN     :
3349  *
3350  *==========================================================================*/
handleBatchMetadata(mm_camera_super_buf_t * metadata_buf,bool free_and_bufdone_meta_buf)3351 void QCamera3HardwareInterface::handleBatchMetadata(
3352         mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3353 {
3354     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
3355 
3356     if (NULL == metadata_buf) {
3357         LOGE("metadata_buf is NULL");
3358         return;
3359     }
3360     /* In batch mode, the metadata will contain the frame number and timestamp of
3361      * the last frame in the batch. Eg: a batch containing buffers from request
3362      * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3363      * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3364      * multiple process_capture_results */
3365     metadata_buffer_t *metadata =
3366             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3367     int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3368     uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3369     uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3370     uint32_t frame_number = 0, urgent_frame_number = 0;
3371     int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3372     bool invalid_metadata = false;
3373     size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3374     size_t loopCount = 1;
3375     bool is_metabuf_queued = false;
3376 
3377     int32_t *p_frame_number_valid =
3378             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3379     uint32_t *p_frame_number =
3380             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3381     int64_t *p_capture_time =
3382             POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3383     int32_t *p_urgent_frame_number_valid =
3384             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3385     uint32_t *p_urgent_frame_number =
3386             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3387 
3388     if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3389             (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3390             (NULL == p_urgent_frame_number)) {
3391         LOGE("Invalid metadata");
3392         invalid_metadata = true;
3393     } else {
3394         frame_number_valid = *p_frame_number_valid;
3395         last_frame_number = *p_frame_number;
3396         last_frame_capture_time = *p_capture_time;
3397         urgent_frame_number_valid = *p_urgent_frame_number_valid;
3398         last_urgent_frame_number = *p_urgent_frame_number;
3399     }
3400 
3401     /* In batchmode, when no video buffers are requested, set_parms are sent
3402      * for every capture_request. The difference between consecutive urgent
3403      * frame numbers and frame numbers should be used to interpolate the
3404      * corresponding frame numbers and time stamps */
3405     pthread_mutex_lock(&mMutex);
3406     if (urgent_frame_number_valid) {
3407         ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3408         if(idx < 0) {
3409             LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3410                 last_urgent_frame_number);
3411             mState = ERROR;
3412             pthread_mutex_unlock(&mMutex);
3413             return;
3414         }
3415         first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
3416         urgentFrameNumDiff = last_urgent_frame_number + 1 -
3417                 first_urgent_frame_number;
3418 
3419         LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3420                  urgent_frame_number_valid,
3421                 first_urgent_frame_number, last_urgent_frame_number);
3422     }
3423 
3424     if (frame_number_valid) {
3425         ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3426         if(idx < 0) {
3427             LOGE("Invalid frame number received: %d. Irrecoverable error",
3428                 last_frame_number);
3429             mState = ERROR;
3430             pthread_mutex_unlock(&mMutex);
3431             return;
3432         }
3433         first_frame_number = mPendingBatchMap.valueAt(idx);
3434         frameNumDiff = last_frame_number + 1 -
3435                 first_frame_number;
3436         mPendingBatchMap.removeItem(last_frame_number);
3437 
3438         LOGD("frm: valid: %d frm_num: %d - %d",
3439                  frame_number_valid,
3440                 first_frame_number, last_frame_number);
3441 
3442     }
3443     pthread_mutex_unlock(&mMutex);
3444 
3445     if (urgent_frame_number_valid || frame_number_valid) {
3446         loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3447         if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3448             LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3449                      urgentFrameNumDiff, last_urgent_frame_number);
3450         if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3451             LOGE("frameNumDiff: %d frameNum: %d",
3452                      frameNumDiff, last_frame_number);
3453     }
3454 
3455     for (size_t i = 0; i < loopCount; i++) {
3456         /* handleMetadataWithLock is called even for invalid_metadata for
3457          * pipeline depth calculation */
3458         if (!invalid_metadata) {
3459             /* Infer frame number. Batch metadata contains frame number of the
3460              * last frame */
3461             if (urgent_frame_number_valid) {
3462                 if (i < urgentFrameNumDiff) {
3463                     urgent_frame_number =
3464                             first_urgent_frame_number + i;
3465                     LOGD("inferred urgent frame_number: %d",
3466                              urgent_frame_number);
3467                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3468                             CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3469                 } else {
3470                     /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3471                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3472                             CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3473                 }
3474             }
3475 
3476             /* Infer frame number. Batch metadata contains frame number of the
3477              * last frame */
3478             if (frame_number_valid) {
3479                 if (i < frameNumDiff) {
3480                     frame_number = first_frame_number + i;
3481                     LOGD("inferred frame_number: %d", frame_number);
3482                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3483                             CAM_INTF_META_FRAME_NUMBER, frame_number);
3484                 } else {
3485                     /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3486                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3487                              CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3488                 }
3489             }
3490 
3491             if (last_frame_capture_time) {
3492                 //Infer timestamp
3493                 first_frame_capture_time = last_frame_capture_time -
3494                         (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
3495                 capture_time =
3496                         first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
3497                 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3498                         CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3499                 LOGD("batch capture_time: %lld, capture_time: %lld",
3500                          last_frame_capture_time, capture_time);
3501             }
3502         }
3503         pthread_mutex_lock(&mMutex);
3504         handleMetadataWithLock(metadata_buf,
3505                 false /* free_and_bufdone_meta_buf */,
3506                 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3507                 (i == frameNumDiff-1), /* last metadata in the batch metadata */
3508                 &is_metabuf_queued /* if metabuf isqueued or not */);
3509         pthread_mutex_unlock(&mMutex);
3510     }
3511 
3512     /* BufDone metadata buffer */
3513     if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
3514         mMetadataChannel->bufDone(metadata_buf);
3515         free(metadata_buf);
3516         metadata_buf = NULL;
3517     }
3518 }
3519 
notifyError(uint32_t frameNumber,camera3_error_msg_code_t errorCode)3520 void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3521         camera3_error_msg_code_t errorCode)
3522 {
3523     camera3_notify_msg_t notify_msg;
3524     memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3525     notify_msg.type = CAMERA3_MSG_ERROR;
3526     notify_msg.message.error.error_code = errorCode;
3527     notify_msg.message.error.error_stream = NULL;
3528     notify_msg.message.error.frame_number = frameNumber;
3529     orchestrateNotify(&notify_msg);
3530 
3531     return;
3532 }
3533 
3534 /*===========================================================================
3535  * FUNCTION   : sendPartialMetadataWithLock
3536  *
3537  * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3538  *
3539  * PARAMETERS : @metadata: metadata buffer
3540  *              @requestIter: The iterator for the pending capture request for
 *              which the partial result is being sent
3542  *              @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3543  *                  last urgent metadata in a batch. Always true for non-batch mode
3544  *              @isJumpstartMetadata: Whether this is a partial metadata for
3545  *              jumpstart, i.e. even though it doesn't map to a valid partial
3546  *              frame number, its metadata entries should be kept.
3547  *
3548  * RETURN     :
3549  *
3550  *==========================================================================*/
3551 
sendPartialMetadataWithLock(metadata_buffer_t * metadata,const pendingRequestIterator requestIter,bool lastUrgentMetadataInBatch,bool isJumpstartMetadata)3552 void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3553         metadata_buffer_t *metadata,
3554         const pendingRequestIterator requestIter,
3555         bool lastUrgentMetadataInBatch,
3556         bool isJumpstartMetadata)
3557 {
3558     camera3_capture_result_t result;
3559     memset(&result, 0, sizeof(camera3_capture_result_t));
3560 
3561     requestIter->partial_result_cnt++;
3562 
3563     // Extract 3A metadata
3564     result.result = translateCbUrgentMetadataToResultMetadata(
3565             metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3566             isJumpstartMetadata);
3567     // Populate metadata result
3568     result.frame_number = requestIter->frame_number;
3569     result.num_output_buffers = 0;
3570     result.output_buffers = NULL;
3571     result.partial_result = requestIter->partial_result_cnt;
3572 
3573     {
3574         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
3575         if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3576             // Notify HDR+ client about the partial metadata.
3577             gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3578             result.partial_result == PARTIAL_RESULT_COUNT);
3579         }
3580     }
3581 
3582     orchestrateResult(&result);
3583     LOGD("urgent frame_number = %u", result.frame_number);
3584     free_camera_metadata((camera_metadata_t *)result.result);
3585 }
3586 
3587 /*===========================================================================
3588  * FUNCTION   : handleMetadataWithLock
3589  *
3590  * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3591  *
3592  * PARAMETERS : @metadata_buf: metadata buffer
3593  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3594  *                 the meta buf in this method
3595  *              @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3596  *                  last urgent metadata in a batch. Always true for non-batch mode
3597  *              @lastMetadataInBatch: Boolean to indicate whether this is the
3598  *                  last metadata in a batch. Always true for non-batch mode
3599  *              @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3600  *                  buffer is enqueued or not.
3601  *
3602  * RETURN     :
3603  *
3604  *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
    bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
    bool *p_is_metabuf_queued)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
    // Drop metadata outright while flushing or when the HAL is in an
    // error/deinit state; only release the buffer if the caller asked us to.
    if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
        //during flush do not send metadata from this thread
        LOGD("not sending metadata during flush or when mState is error");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        return;
    }

    //not in flush
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid, urgent_frame_number_valid;
    uint32_t frame_number, urgent_frame_number;
    int64_t capture_time, capture_time_av;
    nsecs_t currentSysTime;

    // Look up the frame-number / timestamp entries inside the metadata buffer.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
            metadata) {
        LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
                 *p_frame_number_valid, *p_frame_number);
    }

    camera_metadata_t *resultMetadata = nullptr;

    // All of these entries are mandatory; if any is missing, free the buffer
    // (when requested) and still bump pipeline depth via done_metadata.
    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    frame_number_valid =        *p_frame_number_valid;
    frame_number =              *p_frame_number;
    capture_time =              *p_capture_time;
    capture_time_av =           *p_capture_time_av;
    urgent_frame_number_valid = *p_urgent_frame_number_valid;
    urgent_frame_number =       *p_urgent_frame_number;
    currentSysTime =            systemTime(CLOCK_MONOTONIC);

    // If the sensor timestamp is not calibrated against CLOCK_MONOTONIC,
    // estimate the BOOTTIME-to-MONOTONIC offset by sampling both clocks a few
    // times, keeping the measurement with the smallest sampling window, and
    // subtract it from the capture time. `measured` is always set on i == 0.
    if (!gCamCapability[mCameraId]->timestamp_calibrated) {
        const int tries = 3;
        nsecs_t bestGap, measured;
        for (int i = 0; i < tries; ++i) {
            const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
            const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
            const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
            const nsecs_t gap = tmono2 - tmono;
            if (i == 0 || gap < bestGap) {
                bestGap = gap;
                measured = tbase - ((tmono + tmono2) >> 1);
            }
        }
        capture_time -= measured;
    }

    // Detect if buffers from any requests are overdue
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        int64_t timeout;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            // If there is a pending HDR+ request, the following requests may be blocked until the
            // HDR+ request is done. So allow a longer timeout.
            timeout = (mHdrPlusPendingRequests.size() > 0) ?
                    MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
            if (timeout < mExpectedInflightDuration) {
                timeout = mExpectedInflightDuration;
            }
        }

        if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
            // Request is overdue: cancel every outstanding buffer of the
            // request on its owning channel.
            for (auto &missed : req.mPendingBufferList) {
                assert(missed.stream->priv);
                if (missed.stream->priv) {
                    QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
                    assert(ch->mStreams[0]);
                    if (ch->mStreams[0]) {
                        LOGE("Cancel missing frame = %d, buffer = %p,"
                            "stream type = %d, stream format = %d",
                            req.frame_number, missed.buffer,
                            ch->mStreams[0]->getMyType(), missed.stream->format);
                        ch->timeoutFrame(req.frame_number);
                    }
                }
            }
        }
    }
    //For the very first metadata callback, regardless whether it contains valid
    //frame number, send the partial metadata for the jumpstarting requests.
    //Note that this has to be done even if the metadata doesn't contain valid
    //urgent frame number, because in the case only 1 request is ever submitted
    //to HAL, there won't be subsequent valid urgent frame number.
    if (mFirstMetadataCallback) {
        for (pendingRequestIterator i =
                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            if (i->bUseFirstPartial) {
                sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
                        true /*isJumpstartMetadata*/);
            }
        }
        mFirstMetadataCallback = false;
    }

    //Partial result on process_capture_result for timestamp
    if (urgent_frame_number_valid) {
        LOGD("valid urgent frame_number = %u", urgent_frame_number);

        //Received an urgent Frame Number, handle it
        //using partial results
        for (pendingRequestIterator i =
                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            LOGD("Iterator Frame = %d urgent frame = %d",
                 i->frame_number, urgent_frame_number);

            // Any older non-input, non-HDR+ request that never received a
            // partial result has had its urgent metadata dropped; flag it and
            // consume its partial-result slot so it is not reported twice.
            if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
                    (i->partial_result_cnt == 0)) {
                LOGE("Error: HAL missed urgent metadata for frame number %d",
                         i->frame_number);
                i->partialResultDropped = true;
                i->partial_result_cnt++;
            }

            if (i->frame_number == urgent_frame_number &&
                     i->partial_result_cnt == 0) {
                sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
                        false /*isJumpstartMetadata*/);
                if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
                    // Instant AEC settled for this frame.
                    LOGH("instant AEC settled for frame number %d", urgent_frame_number);
                    mInstantAECSettledFrameNumber = urgent_frame_number;
                }
                break;
            }
        }
    }

    // SOF-only callbacks carry no regular frame number; release the buffer
    // (when requested) and fall through to the pipeline-depth bookkeeping.
    if (!frame_number_valid) {
        LOGD("Not a valid normal frame number, used as SOF only");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    LOGH("valid frame_number = %u, capture_time = %lld",
            frame_number, capture_time);

    handleDepthDataLocked(metadata->depth_data, frame_number,
            metadata->is_depth_data_valid);

    // Check whether any stream buffer corresponding to this is dropped or not
    // If dropped, then send the ERROR_BUFFER for the corresponding stream
    // OR check if instant AEC is enabled, then need to drop frames until AEC is settled.
    for (auto & pendingRequest : mPendingRequestsList) {
        if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
                    mInstantAECSettledFrameNumber)) {
            camera3_notify_msg_t notify_msg = {};
            for (auto & buffer : pendingRequest.buffers) {
                bool dropFrame = false;
                QCamera3ProcessingChannel *channel =
                        (QCamera3ProcessingChannel *)buffer.stream->priv;
                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                if (p_cam_frame_drop) {
                    // Dropped-frame case: only streams listed in the drop
                    // info get an error buffer.
                    for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
                        if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
                            // Got the stream ID for drop frame.
                            dropFrame = true;
                            break;
                        }
                    }
                } else {
                    // This is instant AEC case.
                    // For instant AEC drop the stream until AEC is settled.
                    dropFrame = true;
                }

                if (dropFrame) {
                    // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                    if (p_cam_frame_drop) {
                        // Treat msg as error for system buffer drops
                        LOGE("Start of reporting error frame#=%u, streamID=%u",
                                 pendingRequest.frame_number, streamID);
                    } else {
                        // For instant AEC, inform frame drop and frame number
                        LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
                                "AEC settled frame number = %u",
                                pendingRequest.frame_number, streamID,
                                mInstantAECSettledFrameNumber);
                    }
                    notify_msg.type = CAMERA3_MSG_ERROR;
                    notify_msg.message.error.frame_number = pendingRequest.frame_number;
                    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                    notify_msg.message.error.error_stream = buffer.stream;
                    orchestrateNotify(&notify_msg);
                    if (p_cam_frame_drop) {
                        // Treat msg as error for system buffer drops
                        LOGE("End of reporting error frame#=%u, streamID=%u",
                                pendingRequest.frame_number, streamID);
                    } else {
                        // For instant AEC, inform frame drop and frame number
                        LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
                                "AEC settled frame number = %u",
                                pendingRequest.frame_number, streamID,
                                mInstantAECSettledFrameNumber);
                    }
                    PendingFrameDropInfo PendingFrameDrop;
                    PendingFrameDrop.frame_number = pendingRequest.frame_number;
                    PendingFrameDrop.stream_ID = streamID;
                    // Add the Frame drop info to mPendingFrameDropList
                    mPendingFrameDropList.push_back(PendingFrameDrop);
                }
            }
        }
    }

    for (auto & pendingRequest : mPendingRequestsList) {
        // Find the pending request with the frame number.
        if (pendingRequest.frame_number < frame_number) {
            // Workaround for case where shutter is missing due to dropped
            // metadata
            if (!pendingRequest.hdrplus && (pendingRequest.input_buffer == nullptr)) {
                mShutterDispatcher.markShutterReady(pendingRequest.frame_number, capture_time);
            }
        } else if (pendingRequest.frame_number == frame_number) {
            // Update the sensor timestamp.
            pendingRequest.timestamp = capture_time;


            /* Set the timestamp in display metadata so that clients aware of
               private_handle such as VT can use this un-modified timestamps.
               Camera framework is unaware of this timestamp and cannot change this */
            updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);

            // Find channel requiring metadata, meaning internal offline postprocess
            // is needed.
            //TODO: for now, we don't support two streams requiring metadata at the same time.
            // (because we are not making copies, and metadata buffer is not reference counted.
            bool internalPproc = false;
            for (pendingBufferIterator iter = pendingRequest.buffers.begin();
                    iter != pendingRequest.buffers.end(); iter++) {
                if (iter->need_metadata) {
                    internalPproc = true;
                    QCamera3ProcessingChannel *channel =
                            (QCamera3ProcessingChannel *)iter->stream->priv;
                    channel->queueReprocMetadata(metadata_buf);
                    // Tell the caller the metadata buffer now belongs to the
                    // reprocess channel and must not be freed here.
                    if(p_is_metabuf_queued != NULL) {
                        *p_is_metabuf_queued = true;
                    }
                    break;
                }
            }
            // Internal (HAL-initiated) requests may also need the metadata
            // for offline postprocessing.
            for (auto itr = pendingRequest.internalRequestList.begin();
                  itr != pendingRequest.internalRequestList.end(); itr++) {
                if (itr->need_metadata) {
                    internalPproc = true;
                    QCamera3ProcessingChannel *channel =
                            (QCamera3ProcessingChannel *)itr->stream->priv;
                    channel->queueReprocMetadata(metadata_buf);
                    break;
                }
            }

            saveExifParams(metadata);

            bool *enableZsl = nullptr;
            if (gExposeEnableZslKey) {
                enableZsl = &pendingRequest.enableZsl;
            }

            resultMetadata = translateFromHalMetadata(metadata,
                    pendingRequest, internalPproc,
                    lastMetadataInBatch, enableZsl);

            updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);

            if (pendingRequest.blob_request) {
                //Dump tuning metadata if enabled and available
                char prop[PROPERTY_VALUE_MAX];
                memset(prop, 0, sizeof(prop));
                property_get("persist.camera.dumpmetadata", prop, "0");
                int32_t enabled = atoi(prop);
                if (enabled && metadata->is_tuning_params_valid) {
                    dumpMetadataToFile(metadata->tuning_params,
                           mMetaFrameCount,
                           enabled,
                           "Snapshot",
                           frame_number);
                }
            }

            if (!internalPproc) {
                LOGD("couldn't find need_metadata for this metadata");
                // Return metadata buffer
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            }

            break;
        }
    }

    mShutterDispatcher.markShutterReady(frame_number, capture_time);

    // Try to send out capture result metadata.
    handlePendingResultMetadataWithLock(frame_number,  resultMetadata);
    return;

done_metadata:
    // Even for invalid/SOF-only metadata, every pending request has advanced
    // one stage further down the pipeline.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
    unblockRequestIfNecessary();
}
3939 
3940 /*===========================================================================
 * FUNCTION   : handleDepthDataLocked
3942  *
3943  * DESCRIPTION: Handles incoming depth data
3944  *
3945  * PARAMETERS : @depthData  : Depth data
3946  *              @frameNumber: Frame number of the incoming depth data
3947  *              @valid      : Valid flag for the incoming data
3948  *
3949  * RETURN     :
3950  *
3951  *==========================================================================*/
void QCamera3HardwareInterface::handleDepthDataLocked(
        const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
    uint32_t currentFrameNumber;
    buffer_handle_t *depthBuffer;

    // Nothing to do when no depth channel was configured.
    if (nullptr == mDepthChannel) {
        return;
    }

    camera3_stream_buffer_t resultBuffer =
        {.acquire_fence = -1,
         .release_fence = -1,
         .status = CAMERA3_BUFFER_STATUS_OK,
         .buffer = nullptr,
         .stream = mDepthChannel->getStream()};
    // Drain the channel's queued depth buffers in frame order, up to and
    // including the buffer matching frameNumber. getOldestFrame() fills in
    // currentFrameNumber for the buffer it returns.
    do {
        depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
        if (nullptr == depthBuffer) {
            // No more queued depth buffers.
            break;
        }

        resultBuffer.buffer = depthBuffer;
        if (currentFrameNumber == frameNumber) {
            // Buffer matches the incoming data: populate it if valid,
            // otherwise return it with an error status.
            if (valid) {
                int32_t rc = mDepthChannel->populateDepthData(depthData,
                        frameNumber);
                if (NO_ERROR != rc) {
                    resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
                } else {
                    resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
                }
            } else {
                resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            }
        } else if (currentFrameNumber > frameNumber) {
            // The oldest queued buffer is newer than the incoming data; keep
            // it queued for a later callback.
            break;
        } else {
            // Older buffer with no matching depth data: report an error to
            // the framework and return the buffer so it is not stranded.
            camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
                    {{currentFrameNumber, mDepthChannel->getStream(),
                            CAMERA3_MSG_ERROR_BUFFER}}};
            orchestrateNotify(&notify_msg);

            LOGE("Depth buffer for frame number: %d is missing "
                    "returning back!", currentFrameNumber);
            resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
        }
        mDepthChannel->unmapBuffer(currentFrameNumber);
        mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
    } while (currentFrameNumber < frameNumber);
}
4002 
4003 /*===========================================================================
4004  * FUNCTION   : notifyErrorFoPendingDepthData
4005  *
4006  * DESCRIPTION: Returns error for any pending depth buffers
4007  *
4008  * PARAMETERS : depthCh - depth channel that needs to get flushed
4009  *
4010  * RETURN     :
4011  *
4012  *==========================================================================*/
notifyErrorFoPendingDepthData(QCamera3DepthChannel * depthCh)4013 void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
4014         QCamera3DepthChannel *depthCh) {
4015     uint32_t currentFrameNumber;
4016     buffer_handle_t *depthBuffer;
4017 
4018     if (nullptr == depthCh) {
4019         return;
4020     }
4021 
4022     camera3_notify_msg_t notify_msg =
4023         {.type = CAMERA3_MSG_ERROR,
4024                 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4025     camera3_stream_buffer_t resultBuffer =
4026         {.acquire_fence = -1,
4027          .release_fence = -1,
4028          .buffer = nullptr,
4029          .stream = depthCh->getStream(),
4030          .status = CAMERA3_BUFFER_STATUS_ERROR};
4031 
4032     while (nullptr !=
4033             (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4034         depthCh->unmapBuffer(currentFrameNumber);
4035 
4036         notify_msg.message.error.frame_number = currentFrameNumber;
4037         orchestrateNotify(&notify_msg);
4038 
4039         mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
4040     };
4041 }
4042 
4043 /*===========================================================================
4044  * FUNCTION   : hdrPlusPerfLock
4045  *
4046  * DESCRIPTION: perf lock for HDR+ using custom intent
4047  *
4048  * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4049  *
4050  * RETURN     : None
4051  *
4052  *==========================================================================*/
hdrPlusPerfLock(mm_camera_super_buf_t * metadata_buf)4053 void QCamera3HardwareInterface::hdrPlusPerfLock(
4054         mm_camera_super_buf_t *metadata_buf)
4055 {
4056     if (NULL == metadata_buf) {
4057         LOGE("metadata_buf is NULL");
4058         return;
4059     }
4060     metadata_buffer_t *metadata =
4061             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4062     int32_t *p_frame_number_valid =
4063             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4064     uint32_t *p_frame_number =
4065             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4066 
4067     if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4068         LOGE("%s: Invalid metadata", __func__);
4069         return;
4070     }
4071 
4072     //acquire perf lock for 2 secs after the last HDR frame is captured
4073     constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
4074     if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4075         if ((p_frame_number != NULL) &&
4076                 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
4077             mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
4078         }
4079     }
4080 }
4081 
4082 /*===========================================================================
4083  * FUNCTION   : handleInputBufferWithLock
4084  *
4085  * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4086  *
4087  * PARAMETERS : @frame_number: frame number of the input buffer
4088  *
4089  * RETURN     :
4090  *
4091  *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
    // Locate the pending request that carries this frame number. The list is
    // scanned linearly; requests are ordered by increasing frame number.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        // Prefer the sensor timestamp carried in the request settings for the
        // shutter callback; fall back to the current monotonic time when the
        // settings are missing or do not carry ANDROID_SENSOR_TIMESTAMP.
        CameraMetadata settings;
        nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
        if(i->settings) {
            settings = i->settings;
            if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
            } else {
                LOGE("No timestamp in input settings! Using current one.");
            }
        } else {
            LOGE("Input settings missing!");
        }

        mShutterDispatcher.markShutterReady(frame_number, capture_time);
        LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
                    i->frame_number, capture_time);

        // For a reprocess request the result metadata is simply the request
        // settings, and the consumed input buffer is returned in the same
        // capture result.
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        orchestrateResult(&result);
        LOGD("Input request metadata and input buffer frame_number = %u",
                        i->frame_number);
        // Erase before dispatching so the dispatcher does not treat this
        // request as still pending.
        i = erasePendingRequest(i);

        // Dispatch result metadata that may be just unblocked by this reprocess result.
        dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
    } else {
        LOGE("Could not find input request for frame number %d", frame_number);
    }
}
4136 
4137 /*===========================================================================
4138  * FUNCTION   : handleBufferWithLock
4139  *
4140  * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4141  *
4142  * PARAMETERS : @buffer: image buffer for the callback
4143  *              @frame_number: frame number of the image buffer
4144  *
4145  * RETURN     :
4146  *
4147  *==========================================================================*/
handleBufferWithLock(camera3_stream_buffer_t * buffer,uint32_t frame_number)4148 void QCamera3HardwareInterface::handleBufferWithLock(
4149     camera3_stream_buffer_t *buffer, uint32_t frame_number)
4150 {
4151     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
4152 
4153     if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4154         mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4155     }
4156 
4157     /* Nothing to be done during error state */
4158     if ((ERROR == mState) || (DEINIT == mState)) {
4159         return;
4160     }
4161     if (mFlushPerf) {
4162         handleBuffersDuringFlushLock(buffer);
4163         return;
4164     }
4165     //not in flush
4166     // If the frame number doesn't exist in the pending request list,
4167     // directly send the buffer to the frameworks, and update pending buffers map
4168     // Otherwise, book-keep the buffer.
4169     pendingRequestIterator i = mPendingRequestsList.begin();
4170     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4171         i++;
4172     }
4173 
4174     if (i != mPendingRequestsList.end()) {
4175         if (i->input_buffer) {
4176             // For a reprocessing request, try to send out result metadata.
4177             handlePendingResultMetadataWithLock(frame_number, nullptr);
4178         }
4179     }
4180 
4181     // Check if this frame was dropped.
4182     for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4183             m != mPendingFrameDropList.end(); m++) {
4184         QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4185         uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4186         if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4187             buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4188             LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4189                      frame_number, streamID);
4190             m = mPendingFrameDropList.erase(m);
4191             break;
4192         }
4193     }
4194 
4195     // WAR for encoder avtimer timestamp issue
4196     QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4197     if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
4198         m_bAVTimerEnabled) {
4199         for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
4200             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
4201             if (req->frame_number != frame_number)
4202                 continue;
4203             if(req->av_timestamp == 0) {
4204                 buffer->status |= CAMERA3_BUFFER_STATUS_ERROR;
4205             }
4206             else {
4207                 struct private_handle_t *priv_handle =
4208                     (struct private_handle_t *) (*(buffer->buffer));
4209                 setMetaData(priv_handle, SET_VT_TIMESTAMP, &(req->av_timestamp));
4210             }
4211         }
4212     }
4213 
4214     buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4215     LOGH("result frame_number = %d, buffer = %p",
4216              frame_number, buffer->buffer);
4217 
4218     mPendingBuffersMap.removeBuf(buffer->buffer);
4219     mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4220 
4221     if (mPreviewStarted == false) {
4222         QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4223         if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
4224             logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4225 
4226             mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4227             mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4228             mPreviewStarted = true;
4229 
4230             // Set power hint for preview
4231             mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4232         }
4233     }
4234 }
4235 
removeUnrequestedMetadata(pendingRequestIterator requestIter,camera_metadata_t * resultMetadata)4236 void QCamera3HardwareInterface::removeUnrequestedMetadata(pendingRequestIterator requestIter,
4237         camera_metadata_t *resultMetadata) {
4238     CameraMetadata metadata;
4239     metadata.acquire(resultMetadata);
4240 
4241     // Remove len shading map if it's not requested.
4242     if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF &&
4243             metadata.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE) &&
4244             metadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF).data.u8[0] !=
4245             ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4246         metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4247         metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4248             &requestIter->requestedLensShadingMapMode, 1);
4249     }
4250 
4251     // Remove face information if it's not requested.
4252     if (requestIter->requestedFaceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_OFF &&
4253             metadata.exists(ANDROID_STATISTICS_FACE_DETECT_MODE) &&
4254             metadata.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0] !=
4255             ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4256         metadata.erase(ANDROID_STATISTICS_FACE_RECTANGLES);
4257         metadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE,
4258                 &requestIter->requestedFaceDetectMode, 1);
4259     }
4260 
4261     requestIter->resultMetadata = metadata.release();
4262 }
4263 
// Attaches result metadata to its pending request and kicks off dispatch.
// Called with mMutex held. resultMetadata may be nullptr on the reprocess
// path (see handleBufferWithLock), where the settings are used instead.
void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
        camera_metadata_t *resultMetadata)
{
    // Find the pending request for this result metadata.
    auto requestIter = mPendingRequestsList.begin();
    while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
        requestIter++;
    }

    if (requestIter == mPendingRequestsList.end()) {
        ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    // Update the result metadata
    requestIter->resultMetadata = resultMetadata;

    // Check what type of request this is.
    bool liveRequest = false;
    if (requestIter->hdrplus) {
        // HDR+ request doesn't have partial results.
        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
    } else if (requestIter->input_buffer != nullptr) {
        // Reprocessing request result is the same as settings.
        requestIter->resultMetadata = requestIter->settings;
        // Reprocessing request doesn't have partial results.
        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
    } else {
        // Live (normal streaming) request.
        liveRequest = true;
        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
        mPendingLiveRequest--;

        {
            // Scope the HDR+ client lock to just this notification.
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            // For a live request, send the metadata to HDR+ client.
            if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
                gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
                    requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
            }
        }
    }

    // Filter out metadata the app didn't ask for. Skipped for reprocess
    // requests, whose result aliases the request settings.
    if (requestIter->input_buffer == nullptr) {
        removeUnrequestedMetadata(requestIter, resultMetadata);
    }

    dispatchResultMetadataWithLock(frameNumber, liveRequest);
}
4312 
// Sends out, in frame-number order, every pending result metadata that is
// ready, and reports ERROR_RESULT for live requests the HAL missed. Called
// with mMutex held.
void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
        bool isLiveRequest) {
    // The pending requests are ordered by increasing frame numbers. The result metadata are ready
    // to be sent if all previous pending requests are ready to be sent.
    bool readyToSend = true;

    // Iterate through the pending requests to send out result metadata that are ready. Also if
    // this result metadata belongs to a live request, notify errors for previous live requests
    // that don't have result metadata yet.
    auto iter = mPendingRequestsList.begin();
    while (iter != mPendingRequestsList.end()) {
        // Check if current pending request is ready. If it's not ready, the following pending
        // requests are also not ready.
        if (readyToSend && iter->resultMetadata == nullptr) {
            readyToSend = false;
        }

        // A "live" request is a normal streaming request: neither HDR+ nor
        // reprocess.
        bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
        bool errorResult = false;

        camera3_capture_result_t result = {};
        result.frame_number = iter->frame_number;
        result.result = iter->resultMetadata;
        result.partial_result = iter->partial_result_cnt;

        // If this pending buffer has result metadata, we may be able to send out shutter callback
        // and result metadata.
        if (iter->resultMetadata != nullptr) {
            if (!readyToSend) {
                // If any of the previous pending request is not ready, this pending request is
                // also not ready to send in order to keep shutter callbacks and result metadata
                // in order.
                iter++;
                continue;
            }
            // Notify ERROR_RESULT if partial result was dropped.
            errorResult = iter->partialResultDropped;
        } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
            // If the result metadata belongs to a live request, notify errors for previous pending
            // live requests.
            mPendingLiveRequest--;

            LOGE("Error: HAL missed metadata for frame number %d", iter->frame_number);
            errorResult = true;
        } else {
            // Not ready and not a missed live request — leave it pending.
            iter++;
            continue;
        }

        if (errorResult) {
            notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
        } else {
            // Metadata-only result: buffers travel separately via the output
            // buffer dispatcher.
            result.output_buffers = nullptr;
            result.num_output_buffers = 0;
            orchestrateResult(&result);
        }
        // For reprocessing, result metadata is the same as settings so do not free it here to
        // avoid double free.
        if (result.result != iter->settings) {
            free_camera_metadata((camera_metadata_t *)result.result);
        }
        iter->resultMetadata = nullptr;
        // erasePendingRequest returns the next valid iterator.
        iter = erasePendingRequest(iter);
    }

    if (isLiveRequest) {
        for (auto &iter : mPendingRequestsList) {
            // Increment pipeline depth for the following pending requests.
            if (iter.frame_number > frameNumber) {
                iter.pipeline_depth++;
            }
        }
    }

    unblockRequestIfNecessary();
}
4389 
4390 /*===========================================================================
4391  * FUNCTION   : unblockRequestIfNecessary
4392  *
4393  * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4394  *              that mMutex is held when this function is called.
4395  *
4396  * PARAMETERS :
4397  *
4398  * RETURN     :
4399  *
4400  *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
   // Unblock process_capture_request
   // Always signals; the waiting thread re-checks its predicate under mMutex,
   // so a signal when no one is blocked is harmless.
   pthread_cond_signal(&mRequestCond);
}
4406 
4407 /*===========================================================================
4408  * FUNCTION   : isHdrSnapshotRequest
4409  *
4410  * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4411  *
4412  * PARAMETERS : camera3 request structure
4413  *
4414  * RETURN     : boolean decision variable
4415  *
4416  *==========================================================================*/
isHdrSnapshotRequest(camera3_capture_request * request)4417 bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4418 {
4419     if (request == NULL) {
4420         LOGE("Invalid request handle");
4421         assert(0);
4422         return false;
4423     }
4424 
4425     if (!mForceHdrSnapshot) {
4426         CameraMetadata frame_settings;
4427         frame_settings = request->settings;
4428 
4429         if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4430             uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4431             if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4432                 return false;
4433             }
4434         } else {
4435             return false;
4436         }
4437 
4438         if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4439             uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4440             if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4441                 return false;
4442             }
4443         } else {
4444             return false;
4445         }
4446     }
4447 
4448     for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4449         if (request->output_buffers[i].stream->format
4450                 == HAL_PIXEL_FORMAT_BLOB) {
4451             return true;
4452         }
4453     }
4454 
4455     return false;
4456 }
4457 /*===========================================================================
4458  * FUNCTION   : orchestrateRequest
4459  *
4460  * DESCRIPTION: Orchestrates a capture request from camera service
4461  *
4462  * PARAMETERS :
4463  *   @request : request from framework to process
4464  *
4465  * RETURN     : Error status codes
4466  *
4467  *==========================================================================*/
int32_t QCamera3HardwareInterface::orchestrateRequest(
        camera3_capture_request_t *request)
{

    // Save the framework's view of the request so it can be restored after
    // the internal multi-frame expansion below.
    uint32_t originalFrameNumber = request->frame_number;
    uint32_t originalOutputCount = request->num_output_buffers;
    const camera_metadata_t *original_settings = request->settings;
    List<InternalRequest> internallyRequestedStreams;
    List<InternalRequest> emptyInternalList;

    if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
        // HDR snapshot: expand the single framework request into a bracketed
        // sequence of internal captures (-2x settle/capture, 0x settle/capture,
        // +2x settle/capture). Only one of them maps back to the framework's
        // frame number; the rest use internally generated numbers that are
        // dropped on the way out (see orchestrateResult/orchestrateNotify).
        LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
        uint32_t internalFrameNumber;
        CameraMetadata modified_meta;


        /* Add Blob channel to list of internally requested streams */
        for (uint32_t i = 0; i < request->num_output_buffers; i++) {
            if (request->output_buffers[i].stream->format
                    == HAL_PIXEL_FORMAT_BLOB) {
                InternalRequest streamRequested;
                streamRequested.meteringOnly = 1;
                streamRequested.need_metadata = 0;
                streamRequested.stream = request->output_buffers[i].stream;
                internallyRequestedStreams.push_back(streamRequested);
            }
        }
        request->num_output_buffers = 0;
        auto itr =  internallyRequestedStreams.begin();

        /* Modify setting to set compensation */
        modified_meta = request->settings;
        int32_t expCompensation = GB_HDR_HALF_STEP_EV;
        uint8_t aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        camera_metadata_t *modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & -2x frame */
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // This capture carries the framework's output buffers and is the one
        // mapped back to the original framework frame number.
        request->num_output_buffers = originalOutputCount;
        _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, emptyInternalList);
        request->num_output_buffers = 0;

        modified_meta = modified_settings;
        expCompensation = 0;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & 0X frame */

        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            LOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            // Settling frame: metering only, no metadata needed.
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            // Actual 0x capture: full frame with metadata.
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        /* Capture 2X frame*/
        modified_meta = modified_settings;
        expCompensation = GB_HDR_2X_STEP_EV;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);


        /* Capture 2X on original streaming config*/
        internallyRequestedStreams.clear();

        /* Restore original settings pointer */
        // NOTE(review): the last `modified_settings` released above is not
        // freed before the pointer is restored — looks like a leak of one
        // camera_metadata_t per HDR snapshot; verify against
        // processCaptureRequest's ownership contract before changing.
        request->settings = original_settings;
    } else {
        // Normal path: a one-to-one mapping between the framework frame
        // number and a fresh internal one.
        uint32_t internalFrameNumber;
        _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        return processCaptureRequest(request, internallyRequestedStreams);
    }

    return NO_ERROR;
}
4603 
4604 /*===========================================================================
4605  * FUNCTION   : orchestrateResult
4606  *
4607  * DESCRIPTION: Orchestrates a capture result to camera service
4608  *
4609  * PARAMETERS :
4610  *   @request : request from framework to process
4611  *
4612  * RETURN     :
4613  *
4614  *==========================================================================*/
orchestrateResult(camera3_capture_result_t * result)4615 void QCamera3HardwareInterface::orchestrateResult(
4616                     camera3_capture_result_t *result)
4617 {
4618     uint32_t frameworkFrameNumber;
4619     int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4620             frameworkFrameNumber);
4621     if (rc != NO_ERROR) {
4622         LOGE("Cannot find translated frameworkFrameNumber");
4623         assert(0);
4624     } else {
4625         if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4626             LOGD("Internal Request drop the result");
4627         } else {
4628             if (result->result != NULL) {
4629                 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4630                 camera_metadata_entry_t entry;
4631                 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4632                 if (ret == OK) {
4633                     int64_t sync_frame_number = frameworkFrameNumber;
4634                     ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4635                     if (ret != OK)
4636                         LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
4637                 }
4638             }
4639             result->frame_number = frameworkFrameNumber;
4640             mCallbackOps->process_capture_result(mCallbackOps, result);
4641         }
4642     }
4643 }
4644 
4645 /*===========================================================================
4646  * FUNCTION   : orchestrateNotify
4647  *
4648  * DESCRIPTION: Orchestrates a notify to camera service
4649  *
4650  * PARAMETERS :
4651  *   @request : request from framework to process
4652  *
4653  * RETURN     :
4654  *
4655  *==========================================================================*/
orchestrateNotify(camera3_notify_msg_t * notify_msg)4656 void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4657 {
4658     uint32_t frameworkFrameNumber;
4659     uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
4660     int32_t rc = NO_ERROR;
4661 
4662     rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
4663                                                           frameworkFrameNumber);
4664 
4665     if (rc != NO_ERROR) {
4666         if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4667             LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4668             frameworkFrameNumber = 0;
4669         } else {
4670             LOGE("Cannot find translated frameworkFrameNumber");
4671             assert(0);
4672             return;
4673         }
4674     }
4675 
4676     if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4677         LOGD("Internal Request drop the notifyCb");
4678     } else {
4679         notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4680         mCallbackOps->notify(mCallbackOps, notify_msg);
4681     }
4682 }
4683 
4684 /*===========================================================================
4685  * FUNCTION   : FrameNumberRegistry
4686  *
4687  * DESCRIPTION: Constructor
4688  *
4689  * PARAMETERS :
4690  *
4691  * RETURN     :
4692  *
4693  *==========================================================================*/
FrameNumberRegistry()4694 FrameNumberRegistry::FrameNumberRegistry()
4695 {
4696     _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4697 }
4698 
4699 /*===========================================================================
4700  * FUNCTION   : ~FrameNumberRegistry
4701  *
4702  * DESCRIPTION: Destructor
4703  *
4704  * PARAMETERS :
4705  *
4706  * RETURN     :
4707  *
4708  *==========================================================================*/
~FrameNumberRegistry()4709 FrameNumberRegistry::~FrameNumberRegistry()
4710 {
4711 }
4712 
4713 /*===========================================================================
4714  * FUNCTION   : PurgeOldEntriesLocked
4715  *
4716  * DESCRIPTION: Maintainance function to trigger LRU cleanup mechanism
4717  *
4718  * PARAMETERS :
4719  *
4720  * RETURN     : NONE
4721  *
4722  *==========================================================================*/
purgeOldEntriesLocked()4723 void FrameNumberRegistry::purgeOldEntriesLocked()
4724 {
4725     while (_register.begin() != _register.end()) {
4726         auto itr = _register.begin();
4727         if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4728             _register.erase(itr);
4729         } else {
4730             return;
4731         }
4732     }
4733 }
4734 
4735 /*===========================================================================
4736  * FUNCTION   : allocStoreInternalFrameNumber
4737  *
4738  * DESCRIPTION: Method to note down a framework request and associate a new
4739  *              internal request number against it
4740  *
4741  * PARAMETERS :
4742  *   @fFrameNumber: Identifier given by framework
4743  *   @internalFN  : Output parameter which will have the newly generated internal
4744  *                  entry
4745  *
4746  * RETURN     : Error code
4747  *
4748  *==========================================================================*/
allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,uint32_t & internalFrameNumber)4749 int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4750                                                             uint32_t &internalFrameNumber)
4751 {
4752     Mutex::Autolock lock(mRegistryLock);
4753     internalFrameNumber = _nextFreeInternalNumber++;
4754     LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4755     _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4756     purgeOldEntriesLocked();
4757     return NO_ERROR;
4758 }
4759 
4760 /*===========================================================================
4761  * FUNCTION   : generateStoreInternalFrameNumber
4762  *
4763  * DESCRIPTION: Method to associate a new internal request number independent
4764  *              of any associate with framework requests
4765  *
4766  * PARAMETERS :
4767  *   @internalFrame#: Output parameter which will have the newly generated internal
4768  *
4769  *
4770  * RETURN     : Error code
4771  *
4772  *==========================================================================*/
generateStoreInternalFrameNumber(uint32_t & internalFrameNumber)4773 int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4774 {
4775     Mutex::Autolock lock(mRegistryLock);
4776     internalFrameNumber = _nextFreeInternalNumber++;
4777     LOGD("Generated internal framenumber:%d", internalFrameNumber);
4778     _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4779     purgeOldEntriesLocked();
4780     return NO_ERROR;
4781 }
4782 
4783 /*===========================================================================
4784  * FUNCTION   : getFrameworkFrameNumber
4785  *
4786  * DESCRIPTION: Method to query the framework framenumber given an internal #
4787  *
4788  * PARAMETERS :
4789  *   @internalFrame#: Internal reference
4790  *   @frameworkframenumber: Output parameter holding framework frame entry
4791  *
4792  * RETURN     : Error code
4793  *
4794  *==========================================================================*/
getFrameworkFrameNumber(uint32_t internalFrameNumber,uint32_t & frameworkFrameNumber)4795 int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4796                                                      uint32_t &frameworkFrameNumber)
4797 {
4798     Mutex::Autolock lock(mRegistryLock);
4799     auto itr = _register.find(internalFrameNumber);
4800     if (itr == _register.end()) {
4801         LOGE("Cannot find internal#: %d", internalFrameNumber);
4802         return -ENOENT;
4803     }
4804 
4805     frameworkFrameNumber = itr->second;
4806     purgeOldEntriesLocked();
4807     return NO_ERROR;
4808 }
4809 
fillPbStreamConfig(pbcamera::StreamConfiguration * config,uint32_t pbStreamId,QCamera3Channel * channel,uint32_t streamIndex)4810 status_t QCamera3HardwareInterface::fillPbStreamConfig(
4811         pbcamera::StreamConfiguration *config, uint32_t pbStreamId, QCamera3Channel *channel,
4812         uint32_t streamIndex) {
4813     if (config == nullptr) {
4814         LOGE("%s: config is null", __FUNCTION__);
4815         return BAD_VALUE;
4816     }
4817 
4818     if (channel == nullptr) {
4819         LOGE("%s: channel is null", __FUNCTION__);
4820         return BAD_VALUE;
4821     }
4822 
4823     QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4824     if (stream == nullptr) {
4825         LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4826         return NAME_NOT_FOUND;
4827     }
4828 
4829     const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4830     if (streamInfo == nullptr) {
4831         LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4832         return NAME_NOT_FOUND;
4833     }
4834 
4835     config->id = pbStreamId;
4836     config->image.width = streamInfo->dim.width;
4837     config->image.height = streamInfo->dim.height;
4838     config->image.padding = 0;
4839 
4840     int bytesPerPixel = 0;
4841 
4842     switch (streamInfo->fmt) {
4843         case CAM_FORMAT_YUV_420_NV21:
4844             config->image.format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
4845             bytesPerPixel = 1;
4846             break;
4847         case CAM_FORMAT_YUV_420_NV12:
4848         case CAM_FORMAT_YUV_420_NV12_VENUS:
4849             config->image.format = HAL_PIXEL_FORMAT_YCbCr_420_SP;
4850             bytesPerPixel = 1;
4851             break;
4852         default:
4853             ALOGE("%s: Stream format %d not supported.", __FUNCTION__, streamInfo->fmt);
4854             return BAD_VALUE;
4855     }
4856 
4857     uint32_t totalPlaneSize = 0;
4858 
4859     // Fill plane information.
4860     for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4861         pbcamera::PlaneConfiguration plane;
4862         plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride * bytesPerPixel;
4863         plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4864         config->image.planes.push_back(plane);
4865 
4866         totalPlaneSize += (plane.stride * plane.scanline);
4867     }
4868 
4869     config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
4870     return OK;
4871 }
4872 
4873 /*===========================================================================
4874  * FUNCTION   : processCaptureRequest
4875  *
4876  * DESCRIPTION: process a capture request from camera service
4877  *
4878  * PARAMETERS :
4879  *   @request : request from framework to process
4880  *
4881  * RETURN     :
4882  *
4883  *==========================================================================*/
processCaptureRequest(camera3_capture_request_t * request,List<InternalRequest> & internallyRequestedStreams)4884 int QCamera3HardwareInterface::processCaptureRequest(
4885                     camera3_capture_request_t *request,
4886                     List<InternalRequest> &internallyRequestedStreams)
4887 {
4888     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
4889     int rc = NO_ERROR;
4890     int32_t request_id;
4891     CameraMetadata meta;
4892     bool isVidBufRequested = false;
4893     camera3_stream_buffer_t *pInputBuffer = NULL;
4894     char prop[PROPERTY_VALUE_MAX];
4895 
4896     pthread_mutex_lock(&mMutex);
4897 
4898     // Validate current state
4899     switch (mState) {
4900         case CONFIGURED:
4901         case STARTED:
4902             /* valid state */
4903             break;
4904 
4905         case ERROR:
4906             pthread_mutex_unlock(&mMutex);
4907             handleCameraDeviceError();
4908             return -ENODEV;
4909 
4910         default:
4911             LOGE("Invalid state %d", mState);
4912             pthread_mutex_unlock(&mMutex);
4913             return -ENODEV;
4914     }
4915 
4916     rc = validateCaptureRequest(request, internallyRequestedStreams);
4917     if (rc != NO_ERROR) {
4918         LOGE("incoming request is not valid");
4919         pthread_mutex_unlock(&mMutex);
4920         return rc;
4921     }
4922 
4923     meta = request->settings;
4924 
4925     // For first capture request, send capture intent, and
4926     // stream on all streams
4927     if (mState == CONFIGURED) {
4928         logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
4929         // send an unconfigure to the backend so that the isp
4930         // resources are deallocated
4931         if (!mFirstConfiguration) {
4932             cam_stream_size_info_t stream_config_info;
4933             int32_t hal_version = CAM_HAL_V3;
4934             memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4935             stream_config_info.buffer_info.min_buffers =
4936                     MIN_INFLIGHT_REQUESTS;
4937             stream_config_info.buffer_info.max_buffers =
4938                     m_bIs4KVideo ? 0 :
4939                     m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
4940             clear_metadata_buffer(mParameters);
4941             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4942                     CAM_INTF_PARM_HAL_VERSION, hal_version);
4943             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4944                     CAM_INTF_META_STREAM_INFO, stream_config_info);
4945             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4946                     mParameters);
4947             if (rc < 0) {
4948                 LOGE("set_parms for unconfigure failed");
4949                 pthread_mutex_unlock(&mMutex);
4950                 return rc;
4951             }
4952 
4953         }
4954         mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
4955         /* get eis information for stream configuration */
4956         cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
4957         char is_type_value[PROPERTY_VALUE_MAX];
4958         property_get("persist.camera.is_type", is_type_value, "4");
4959         isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4960         // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4961         property_get("persist.camera.is_type_preview", is_type_value, "4");
4962         isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4963         LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
4964 
4965         if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4966             int32_t hal_version = CAM_HAL_V3;
4967             uint8_t captureIntent =
4968                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4969             mCaptureIntent = captureIntent;
4970             clear_metadata_buffer(mParameters);
4971             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4972             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4973         }
4974         if (mFirstConfiguration) {
4975             // configure instant AEC
4976             // Instant AEC is a session based parameter and it is needed only
4977             // once per complete session after open camera.
4978             // i.e. This is set only once for the first capture request, after open camera.
4979             setInstantAEC(meta);
4980         }
4981         uint8_t fwkVideoStabMode=0;
4982         if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4983             fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4984         }
4985 
4986         // If EIS setprop is enabled then only turn it on for video/preview
4987         bool setEis = m_bEisEnable && m_bEisSupportedSize &&
4988                 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
4989         int32_t vsMode;
4990         vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4991         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4992             rc = BAD_VALUE;
4993         }
4994         LOGD("setEis %d", setEis);
4995         bool eis3Supported = false;
4996         size_t count = IS_TYPE_MAX;
4997         count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4998         for (size_t i = 0; i < count; i++) {
4999             if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
5000                 eis3Supported = true;
5001                 break;
5002             }
5003         }
5004 
5005         //IS type will be 0 unless EIS is supported. If EIS is supported
5006         //it could either be 4 or 5 depending on the stream and video size
5007         for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5008             if (setEis) {
5009                 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
5010                     is_type = isTypePreview;
5011                 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
5012                     if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
5013                         LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
5014                         is_type = IS_TYPE_EIS_2_0;
5015                     } else {
5016                         is_type = isTypeVideo;
5017                     }
5018                 } else {
5019                     is_type = IS_TYPE_NONE;
5020                 }
5021                  mStreamConfigInfo.is_type[i] = is_type;
5022             } else {
5023                  mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
5024             }
5025         }
5026 
5027         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5028                 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
5029 
5030         //Disable tintless only if the property is set to 0
5031         memset(prop, 0, sizeof(prop));
5032         property_get("persist.camera.tintless.enable", prop, "1");
5033         int32_t tintless_value = atoi(prop);
5034 
5035         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5036                 CAM_INTF_PARM_TINTLESS, tintless_value);
5037 
5038         //Disable CDS for HFR mode or if DIS/EIS is on.
5039         //CDS is a session parameter in the backend/ISP, so need to be set/reset
5040         //after every configure_stream
5041         if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
5042                 (m_bIsVideo)) {
5043             int32_t cds = CAM_CDS_MODE_OFF;
5044             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5045                     CAM_INTF_PARM_CDS_MODE, cds))
5046                 LOGE("Failed to disable CDS for HFR mode");
5047 
5048         }
5049 
5050         if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
5051             uint8_t* use_av_timer = NULL;
5052 
5053             if (m_debug_avtimer){
5054                 LOGI(" Enabling AV timer through setprop");
5055                 use_av_timer = &m_debug_avtimer;
5056                 m_bAVTimerEnabled = true;
5057             }
5058             else{
5059                 use_av_timer =
5060                     meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
5061                 if (use_av_timer) {
5062                     m_bAVTimerEnabled = true;
5063                     LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
5064                 }
5065             }
5066 
5067             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
5068                 rc = BAD_VALUE;
5069             }
5070         }
5071 
5072         setMobicat();
5073 
5074         uint8_t nrMode = 0;
5075         if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
5076             nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5077         }
5078 
5079         /* Set fps and hfr mode while sending meta stream info so that sensor
5080          * can configure appropriate streaming mode */
5081         mHFRVideoFps = DEFAULT_VIDEO_FPS;
5082         mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5083         mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
5084         if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5085             rc = setHalFpsRange(meta, mParameters);
5086             if (rc == NO_ERROR) {
5087                 int32_t max_fps =
5088                     (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
5089                 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
5090                     mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5091                 }
5092                 /* For HFR, more buffers are dequeued upfront to improve the performance */
5093                 if (mBatchSize) {
5094                     mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5095                     mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5096                 }
5097             }
5098             else {
5099                 LOGE("setHalFpsRange failed");
5100             }
5101         }
5102         if (meta.exists(ANDROID_CONTROL_MODE)) {
5103             uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5104             rc = extractSceneMode(meta, metaMode, mParameters);
5105             if (rc != NO_ERROR) {
5106                 LOGE("extractSceneMode failed");
5107             }
5108         }
5109         memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5110 
5111         if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5112             cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5113                     meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5114             rc = setVideoHdrMode(mParameters, vhdr);
5115             if (rc != NO_ERROR) {
5116                 LOGE("setVideoHDR is failed");
5117             }
5118         }
5119 
5120         if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
5121             uint8_t sensorModeFullFov =
5122                     meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
5123             LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5124             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5125                     sensorModeFullFov)) {
5126                 rc = BAD_VALUE;
5127             }
5128         }
5129         //TODO: validate the arguments, HSV scenemode should have only the
5130         //advertised fps ranges
5131 
5132         /*set the capture intent, hal version, tintless, stream info,
5133          *and disenable parameters to the backend*/
5134         LOGD("set_parms META_STREAM_INFO " );
5135         for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5136             LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5137                     ", Format:%d is_type: %d",
5138                     mStreamConfigInfo.type[i],
5139                     mStreamConfigInfo.stream_sizes[i].width,
5140                     mStreamConfigInfo.stream_sizes[i].height,
5141                     mStreamConfigInfo.postprocess_mask[i],
5142                     mStreamConfigInfo.format[i],
5143                     mStreamConfigInfo.is_type[i]);
5144         }
5145 
5146         rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5147                     mParameters);
5148         if (rc < 0) {
5149             LOGE("set_parms failed for hal version, stream info");
5150         }
5151 
5152         cam_sensor_mode_info_t sensorModeInfo = {};
5153         rc = getSensorModeInfo(sensorModeInfo);
5154         if (rc != NO_ERROR) {
5155             LOGE("Failed to get sensor output size");
5156             pthread_mutex_unlock(&mMutex);
5157             goto error_exit;
5158         }
5159 
5160         mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5161                 gCamCapability[mCameraId]->active_array_size.height,
5162                 sensorModeInfo.active_array_size.width,
5163                 sensorModeInfo.active_array_size.height);
5164 
5165         /* Set batchmode before initializing channel. Since registerBuffer
5166          * internally initializes some of the channels, better set batchmode
5167          * even before first register buffer */
5168         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5169             it != mStreamInfo.end(); it++) {
5170             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5171             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5172                     && mBatchSize) {
5173                 rc = channel->setBatchSize(mBatchSize);
5174                 //Disable per frame map unmap for HFR/batchmode case
5175                 rc |= channel->setPerFrameMapUnmap(false);
5176                 if (NO_ERROR != rc) {
5177                     LOGE("Channel init failed %d", rc);
5178                     pthread_mutex_unlock(&mMutex);
5179                     goto error_exit;
5180                 }
5181             }
5182         }
5183 
5184         //First initialize all streams
5185         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5186             it != mStreamInfo.end(); it++) {
5187             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5188 
5189             /* Initial value of NR mode is needed before stream on */
5190             channel->setNRMode(nrMode);
5191             if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5192                ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
5193                setEis) {
5194                 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5195                     if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5196                         is_type = mStreamConfigInfo.is_type[i];
5197                         break;
5198                     }
5199                 }
5200                 rc = channel->initialize(is_type);
5201             } else {
5202                 rc = channel->initialize(IS_TYPE_NONE);
5203             }
5204             if (NO_ERROR != rc) {
5205                 LOGE("Channel initialization failed %d", rc);
5206                 pthread_mutex_unlock(&mMutex);
5207                 goto error_exit;
5208             }
5209         }
5210 
5211         if (mRawDumpChannel) {
5212             rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5213             if (rc != NO_ERROR) {
5214                 LOGE("Error: Raw Dump Channel init failed");
5215                 pthread_mutex_unlock(&mMutex);
5216                 goto error_exit;
5217             }
5218         }
5219         if (mHdrPlusRawSrcChannel) {
5220             rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5221             if (rc != NO_ERROR) {
5222                 LOGE("Error: HDR+ RAW Source Channel init failed");
5223                 pthread_mutex_unlock(&mMutex);
5224                 goto error_exit;
5225             }
5226         }
5227         if (mSupportChannel) {
5228             rc = mSupportChannel->initialize(IS_TYPE_NONE);
5229             if (rc < 0) {
5230                 LOGE("Support channel initialization failed");
5231                 pthread_mutex_unlock(&mMutex);
5232                 goto error_exit;
5233             }
5234         }
5235         if (mAnalysisChannel) {
5236             rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5237             if (rc < 0) {
5238                 LOGE("Analysis channel initialization failed");
5239                 pthread_mutex_unlock(&mMutex);
5240                 goto error_exit;
5241             }
5242         }
5243         if (mDummyBatchChannel) {
5244             rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5245             if (rc < 0) {
5246                 LOGE("mDummyBatchChannel setBatchSize failed");
5247                 pthread_mutex_unlock(&mMutex);
5248                 goto error_exit;
5249             }
5250             rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
5251             if (rc < 0) {
5252                 LOGE("mDummyBatchChannel initialization failed");
5253                 pthread_mutex_unlock(&mMutex);
5254                 goto error_exit;
5255             }
5256         }
5257 
5258         // Set bundle info
5259         rc = setBundleInfo();
5260         if (rc < 0) {
5261             LOGE("setBundleInfo failed %d", rc);
5262             pthread_mutex_unlock(&mMutex);
5263             goto error_exit;
5264         }
5265 
5266         //update settings from app here
5267         if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5268             mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5269             LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5270         }
5271         if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5272             mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5273             LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5274         }
5275         if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5276             mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5277             LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5278 
5279             if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5280                 (mLinkedCameraId != mCameraId) ) {
5281                 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5282                     mLinkedCameraId, mCameraId);
5283                 pthread_mutex_unlock(&mMutex);
5284                 goto error_exit;
5285             }
5286         }
5287 
5288         // add bundle related cameras
5289         LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5290         if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5291             cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5292                     &m_pDualCamCmdPtr->bundle_info;
5293             m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
5294             if (mIsDeviceLinked)
5295                 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5296             else
5297                 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5298 
5299             pthread_mutex_lock(&gCamLock);
5300 
5301             if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5302                 LOGE("Dualcam: Invalid Session Id ");
5303                 pthread_mutex_unlock(&gCamLock);
5304                 pthread_mutex_unlock(&mMutex);
5305                 goto error_exit;
5306             }
5307 
5308             if (mIsMainCamera == 1) {
5309                 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5310                 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
5311                 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
5312                 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
5313                 // related session id should be session id of linked session
5314                 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5315             } else {
5316                 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5317                 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
5318                 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
5319                 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
5320                 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5321             }
5322             m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
5323             pthread_mutex_unlock(&gCamLock);
5324 
5325             rc = mCameraHandle->ops->set_dual_cam_cmd(
5326                     mCameraHandle->camera_handle);
5327             if (rc < 0) {
5328                 LOGE("Dualcam: link failed");
5329                 pthread_mutex_unlock(&mMutex);
5330                 goto error_exit;
5331             }
5332         }
5333         goto no_error;
5334 error_exit:
5335         mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
5336         return rc;
5337 no_error:
5338         mWokenUpByDaemon = false;
5339         mPendingLiveRequest = 0;
5340         mFirstConfiguration = false;
5341     }
5342 
5343     uint32_t frameNumber = request->frame_number;
5344     cam_stream_ID_t streamsArray;
5345 
5346     if (mFlushPerf) {
5347         //we cannot accept any requests during flush
5348         LOGE("process_capture_request cannot proceed during flush");
5349         pthread_mutex_unlock(&mMutex);
5350         return NO_ERROR; //should return an error
5351     }
5352 
5353     if (meta.exists(ANDROID_REQUEST_ID)) {
5354         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5355         mCurrentRequestId = request_id;
5356         LOGD("Received request with id: %d", request_id);
5357     } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5358         LOGE("Unable to find request id field, \
5359                 & no previous id available");
5360         pthread_mutex_unlock(&mMutex);
5361         return NAME_NOT_FOUND;
5362     } else {
5363         LOGD("Re-using old request id");
5364         request_id = mCurrentRequestId;
5365     }
5366 
5367     LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5368                                     request->num_output_buffers,
5369                                     request->input_buffer,
5370                                     frameNumber);
5371     // Acquire all request buffers first
5372     streamsArray.num_streams = 0;
5373     int blob_request = 0;
5374     bool depthRequestPresent = false;
5375     uint32_t snapshotStreamId = 0;
5376     for (size_t i = 0; i < request->num_output_buffers; i++) {
5377         const camera3_stream_buffer_t& output = request->output_buffers[i];
5378         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5379 
5380         if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5381                 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
5382             //FIXME??:Call function to store local copy of jpeg data for encode params.
5383             blob_request = 1;
5384             snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5385         }
5386 
5387         if (output.acquire_fence != -1) {
5388            rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5389            close(output.acquire_fence);
5390            if (rc != OK) {
5391               LOGE("sync wait failed %d", rc);
5392               pthread_mutex_unlock(&mMutex);
5393               return rc;
5394            }
5395         }
5396 
5397         if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5398                 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
5399             depthRequestPresent = true;
5400             continue;
5401         }
5402 
5403         streamsArray.stream_request[streamsArray.num_streams++].streamID =
5404             channel->getStreamID(channel->getStreamTypeMask());
5405 
5406         if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5407             isVidBufRequested = true;
5408         }
5409     }
5410 
5411     //FIXME: Add checks to ensure to dups in validateCaptureRequest
5412     for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5413           itr++) {
5414         QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5415         streamsArray.stream_request[streamsArray.num_streams++].streamID =
5416             channel->getStreamID(channel->getStreamTypeMask());
5417 
5418         if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5419             isVidBufRequested = true;
5420         }
5421     }
5422 
5423     if (blob_request) {
5424         ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
5425         mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
5426     }
5427     if (blob_request && mRawDumpChannel) {
5428         LOGD("Trigger Raw based on blob request if Raw dump is enabled");
5429         streamsArray.stream_request[streamsArray.num_streams].streamID =
5430             mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
5431         streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5432     }
5433 
5434     {
5435         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5436         // Request a RAW buffer if
5437         //  1. mHdrPlusRawSrcChannel is valid.
5438         //  2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5439         //  3. There is no pending HDR+ request.
5440         if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5441                 mHdrPlusPendingRequests.size() == 0) {
5442             streamsArray.stream_request[streamsArray.num_streams].streamID =
5443                 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5444             streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5445         }
5446     }
5447 
5448     //extract capture intent
5449     if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5450         mCaptureIntent =
5451                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5452     }
5453 
5454     if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5455         mCacMode =
5456                 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5457     }
5458 
5459     uint8_t requestedLensShadingMapMode;
5460     // Get the shading map mode.
5461     if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5462         mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5463                 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5464     } else {
5465         requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5466     }
5467 
5468     if (meta.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
5469         mLastRequestedFaceDetectMode =
5470                 meta.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
5471     }
5472 
5473     bool hdrPlusRequest = false;
5474     HdrPlusPendingRequest pendingHdrPlusRequest = {};
5475 
5476     {
5477         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5478         // If this request has a still capture intent, try to submit an HDR+ request.
5479         if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5480                 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5481             hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5482         }
5483     }
5484 
5485     if (hdrPlusRequest) {
5486         // For a HDR+ request, just set the frame parameters.
5487         rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5488         if (rc < 0) {
5489             LOGE("fail to set frame parameters");
5490             pthread_mutex_unlock(&mMutex);
5491             return rc;
5492         }
5493     } else if(request->input_buffer == NULL) {
5494         /* Parse the settings:
5495          * - For every request in NORMAL MODE
5496          * - For every request in HFR mode during preview only case
5497          * - For first request of every batch in HFR mode during video
5498          * recording. In batchmode the same settings except frame number is
5499          * repeated in each request of the batch.
5500          */
5501         if (!mBatchSize ||
5502            (mBatchSize && !isVidBufRequested) ||
5503            (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
5504             rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5505             if (rc < 0) {
5506                 LOGE("fail to set frame parameters");
5507                 pthread_mutex_unlock(&mMutex);
5508                 return rc;
5509             }
5510 
5511             {
5512                 // If HDR+ mode is enabled, override the following modes so the necessary metadata
5513                 // will be included in the result metadata sent to Easel HDR+.
5514                 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5515                 if (mHdrPlusModeEnabled) {
5516                     ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5517                         ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5518                     ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STATS_FACEDETECT_MODE,
5519                         ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
5520                 }
5521             }
5522         }
5523         /* For batchMode HFR, setFrameParameters is not called for every
5524          * request. But only frame number of the latest request is parsed.
5525          * Keep track of first and last frame numbers in a batch so that
5526          * metadata for the frame numbers of batch can be duplicated in
5527          * handleBatchMetadata */
5528         if (mBatchSize) {
5529             if (!mToBeQueuedVidBufs) {
5530                 //start of the batch
5531                 mFirstFrameNumberInBatch = request->frame_number;
5532             }
5533             if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5534                 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5535                 LOGE("Failed to set the frame number in the parameters");
5536                 pthread_mutex_unlock(&mMutex);
5537                 return BAD_VALUE;
5538             }
5539         }
5540         if (mNeedSensorRestart) {
5541             /* Unlock the mutex as restartSensor waits on the channels to be
5542              * stopped, which in turn calls stream callback functions -
5543              * handleBufferWithLock and handleMetadataWithLock */
5544             pthread_mutex_unlock(&mMutex);
5545             rc = dynamicUpdateMetaStreamInfo();
5546             if (rc != NO_ERROR) {
5547                 LOGE("Restarting the sensor failed");
5548                 return BAD_VALUE;
5549             }
5550             mNeedSensorRestart = false;
5551             pthread_mutex_lock(&mMutex);
5552         }
5553         if(mResetInstantAEC) {
5554             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5555                     CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5556             mResetInstantAEC = false;
5557         }
5558     } else {
5559         if (request->input_buffer->acquire_fence != -1) {
5560            rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5561            close(request->input_buffer->acquire_fence);
5562            if (rc != OK) {
5563               LOGE("input buffer sync wait failed %d", rc);
5564               pthread_mutex_unlock(&mMutex);
5565               return rc;
5566            }
5567         }
5568     }
5569 
5570     if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5571         mLastCustIntentFrmNum = frameNumber;
5572     }
5573     /* Update pending request list and pending buffers map */
5574     PendingRequestInfo pendingRequest = {};
5575     pendingRequestIterator latestRequest;
5576     pendingRequest.frame_number = frameNumber;
5577     pendingRequest.num_buffers = depthRequestPresent ?
5578             (request->num_output_buffers - 1 ) : request->num_output_buffers;
5579     pendingRequest.request_id = request_id;
5580     pendingRequest.blob_request = blob_request;
5581     pendingRequest.timestamp = 0;
5582     pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
5583     pendingRequest.requestedFaceDetectMode = mLastRequestedFaceDetectMode;
5584     if (request->input_buffer) {
5585         pendingRequest.input_buffer =
5586                 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5587         *(pendingRequest.input_buffer) = *(request->input_buffer);
5588         pInputBuffer = pendingRequest.input_buffer;
5589     } else {
5590        pendingRequest.input_buffer = NULL;
5591        pInputBuffer = NULL;
5592     }
5593     pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
5594 
5595     pendingRequest.pipeline_depth = 0;
5596     pendingRequest.partial_result_cnt = 0;
5597     extractJpegMetadata(mCurJpegMeta, request);
5598     pendingRequest.jpegMetadata = mCurJpegMeta;
5599     pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5600     pendingRequest.capture_intent = mCaptureIntent;
5601     if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5602         pendingRequest.hybrid_ae_enable =
5603                 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5604     }
5605 
5606     /* DevCamDebug metadata processCaptureRequest */
5607     if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5608         mDevCamDebugMetaEnable =
5609                 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5610     }
5611     pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5612     /* DevCamDebug metadata end */
5613 
5614     //extract CAC info
5615     if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5616         mCacMode =
5617                 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5618     }
5619     pendingRequest.fwkCacMode = mCacMode;
5620     pendingRequest.hdrplus = hdrPlusRequest;
5621     pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5622     mExpectedInflightDuration += mExpectedFrameDuration;
5623 
5624     // extract enableZsl info
5625     if (gExposeEnableZslKey) {
5626         if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5627             pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5628             mZslEnabled = pendingRequest.enableZsl;
5629         } else {
5630             pendingRequest.enableZsl = mZslEnabled;
5631         }
5632     }
5633 
5634     PendingBuffersInRequest bufsForCurRequest;
5635     bufsForCurRequest.frame_number = frameNumber;
5636     // Mark current timestamp for the new request
5637     bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
5638     bufsForCurRequest.av_timestamp = 0;
5639     bufsForCurRequest.hdrplus = hdrPlusRequest;
5640 
5641     if (hdrPlusRequest) {
5642         // Save settings for this request.
5643         pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5644         memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5645 
5646         // Add to pending HDR+ request queue.
5647         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5648         mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5649 
5650         ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5651     }
5652 
5653     for (size_t i = 0; i < request->num_output_buffers; i++) {
5654         if ((request->output_buffers[i].stream->data_space ==
5655                 HAL_DATASPACE_DEPTH) &&
5656                 (HAL_PIXEL_FORMAT_BLOB ==
5657                         request->output_buffers[i].stream->format)) {
5658             continue;
5659         }
5660         RequestedBufferInfo requestedBuf;
5661         memset(&requestedBuf, 0, sizeof(requestedBuf));
5662         requestedBuf.stream = request->output_buffers[i].stream;
5663         requestedBuf.buffer = NULL;
5664         pendingRequest.buffers.push_back(requestedBuf);
5665 
5666         // Add to buffer handle the pending buffers list
5667         PendingBufferInfo bufferInfo;
5668         bufferInfo.buffer = request->output_buffers[i].buffer;
5669         bufferInfo.stream = request->output_buffers[i].stream;
5670         bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5671         QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5672         LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5673             frameNumber, bufferInfo.buffer,
5674             channel->getStreamTypeMask(), bufferInfo.stream->format);
5675     }
5676     // Add this request packet into mPendingBuffersMap
5677     mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5678     LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5679         mPendingBuffersMap.get_num_overall_buffers());
5680 
5681     latestRequest = mPendingRequestsList.insert(
5682             mPendingRequestsList.end(), pendingRequest);
5683 
5684     // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5685     // for the frame number.
5686     mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
5687     for (size_t i = 0; i < request->num_output_buffers; i++) {
5688         mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5689     }
5690 
5691     if(mFlush) {
5692         LOGI("mFlush is true");
5693         pthread_mutex_unlock(&mMutex);
5694         return NO_ERROR;
5695     }
5696 
5697     // If this is not an HDR+ request, send the request to metadata and each output buffer's
5698     // channel.
5699     if (!hdrPlusRequest) {
5700         int indexUsed;
5701         // Notify metadata channel we receive a request
5702         mMetadataChannel->request(NULL, frameNumber, indexUsed);
5703 
5704         if(request->input_buffer != NULL){
5705             LOGD("Input request, frame_number %d", frameNumber);
5706             rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5707             if (NO_ERROR != rc) {
5708                 LOGE("fail to set reproc parameters");
5709                 pthread_mutex_unlock(&mMutex);
5710                 return rc;
5711             }
5712         }
5713 
5714         // Call request on other streams
5715         uint32_t streams_need_metadata = 0;
5716         pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5717         for (size_t i = 0; i < request->num_output_buffers; i++) {
5718             const camera3_stream_buffer_t& output = request->output_buffers[i];
5719             QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5720 
5721             if (channel == NULL) {
5722                 LOGW("invalid channel pointer for stream");
5723                 continue;
5724             }
5725 
5726             if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5727                 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5728                           output.buffer, request->input_buffer, frameNumber);
5729                 if(request->input_buffer != NULL){
5730                     rc = channel->request(output.buffer, frameNumber,
5731                             pInputBuffer, &mReprocMeta, indexUsed, false, false);
5732                     if (rc < 0) {
5733                         LOGE("Fail to request on picture channel");
5734                         pthread_mutex_unlock(&mMutex);
5735                         return rc;
5736                     }
5737                 } else {
5738                     if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5739                         assert(NULL != mDepthChannel);
5740                         assert(mDepthChannel == output.stream->priv);
5741 
5742                         rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5743                         if (rc < 0) {
5744                             LOGE("Fail to map on depth buffer");
5745                             pthread_mutex_unlock(&mMutex);
5746                             return rc;
5747                         }
5748                         continue;
5749                     } else {
5750                         LOGD("snapshot request with buffer %p, frame_number %d",
5751                                  output.buffer, frameNumber);
5752                         if (!request->settings) {
5753                             rc = channel->request(output.buffer, frameNumber,
5754                                     NULL, mPrevParameters, indexUsed);
5755                         } else {
5756                             rc = channel->request(output.buffer, frameNumber,
5757                                     NULL, mParameters, indexUsed);
5758                         }
5759                         if (rc < 0) {
5760                             LOGE("Fail to request on picture channel");
5761                             pthread_mutex_unlock(&mMutex);
5762                             return rc;
5763                         }
5764 
5765                         uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5766                         uint32_t j = 0;
5767                         for (j = 0; j < streamsArray.num_streams; j++) {
5768                             if (streamsArray.stream_request[j].streamID == streamId) {
5769                                 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5770                                     streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5771                                 else
5772                                     streamsArray.stream_request[j].buf_index = indexUsed;
5773                                 break;
5774                             }
5775                         }
5776                         if (j == streamsArray.num_streams) {
5777                             LOGE("Did not find matching stream to update index");
5778                             assert(0);
5779                         }
5780 
5781                         pendingBufferIter->need_metadata = true;
5782                         streams_need_metadata++;
5783                     }
5784                 }
5785             } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5786                 bool needMetadata = false;
5787                 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5788                 rc = yuvChannel->request(output.buffer, frameNumber,
5789                         pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5790                         needMetadata, indexUsed, false, false);
5791                 if (rc < 0) {
5792                     LOGE("Fail to request on YUV channel");
5793                     pthread_mutex_unlock(&mMutex);
5794                     return rc;
5795                 }
5796 
5797                 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5798                 uint32_t j = 0;
5799                 for (j = 0; j < streamsArray.num_streams; j++) {
5800                     if (streamsArray.stream_request[j].streamID == streamId) {
5801                         if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5802                             streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5803                         else
5804                             streamsArray.stream_request[j].buf_index = indexUsed;
5805                         break;
5806                     }
5807                 }
5808                 if (j == streamsArray.num_streams) {
5809                     LOGE("Did not find matching stream to update index");
5810                     assert(0);
5811                 }
5812 
5813                 pendingBufferIter->need_metadata = needMetadata;
5814                 if (needMetadata)
5815                     streams_need_metadata += 1;
5816                 LOGD("calling YUV channel request, need_metadata is %d",
5817                          needMetadata);
5818             } else {
5819                 LOGD("request with buffer %p, frame_number %d",
5820                       output.buffer, frameNumber);
5821 
5822                 rc = channel->request(output.buffer, frameNumber, indexUsed);
5823 
5824                 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5825                 uint32_t j = 0;
5826                 for (j = 0; j < streamsArray.num_streams; j++) {
5827                     if (streamsArray.stream_request[j].streamID == streamId) {
5828                         if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5829                             streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5830                         else
5831                             streamsArray.stream_request[j].buf_index = indexUsed;
5832                         break;
5833                     }
5834                 }
5835                 if (j == streamsArray.num_streams) {
5836                     LOGE("Did not find matching stream to update index");
5837                     assert(0);
5838                 }
5839 
5840                 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5841                         && mBatchSize) {
5842                     mToBeQueuedVidBufs++;
5843                     if (mToBeQueuedVidBufs == mBatchSize) {
5844                         channel->queueBatchBuf();
5845                     }
5846                 }
5847                 if (rc < 0) {
5848                     LOGE("request failed");
5849                     pthread_mutex_unlock(&mMutex);
5850                     return rc;
5851                 }
5852             }
5853             pendingBufferIter++;
5854         }
5855 
5856         for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5857               itr++) {
5858             QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5859 
5860             if (channel == NULL) {
5861                 LOGE("invalid channel pointer for stream");
5862                 assert(0);
5863                 pthread_mutex_unlock(&mMutex);
5864                 return BAD_VALUE;
5865             }
5866 
5867             InternalRequest requestedStream;
5868             requestedStream = (*itr);
5869 
5870 
5871             if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5872                 LOGD("snapshot request internally input buffer %p, frame_number %d",
5873                           request->input_buffer, frameNumber);
5874                 if(request->input_buffer != NULL){
5875                     rc = channel->request(NULL, frameNumber,
5876                             pInputBuffer, &mReprocMeta, indexUsed, true,
5877                             requestedStream.meteringOnly);
5878                     if (rc < 0) {
5879                         LOGE("Fail to request on picture channel");
5880                         pthread_mutex_unlock(&mMutex);
5881                         return rc;
5882                     }
5883                 } else {
5884                     LOGD("snapshot request with frame_number %d", frameNumber);
5885                     if (!request->settings) {
5886                         rc = channel->request(NULL, frameNumber,
5887                                 NULL, mPrevParameters, indexUsed, true,
5888                                 requestedStream.meteringOnly);
5889                     } else {
5890                         rc = channel->request(NULL, frameNumber,
5891                                 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5892                     }
5893                     if (rc < 0) {
5894                         LOGE("Fail to request on picture channel");
5895                         pthread_mutex_unlock(&mMutex);
5896                         return rc;
5897                     }
5898 
5899                     if ((*itr).meteringOnly != 1) {
5900                         requestedStream.need_metadata = 1;
5901                         streams_need_metadata++;
5902                     }
5903                 }
5904 
5905                 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5906                 uint32_t j = 0;
5907                 for (j = 0; j < streamsArray.num_streams; j++) {
5908                     if (streamsArray.stream_request[j].streamID == streamId) {
5909                       if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5910                           streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5911                       else
5912                           streamsArray.stream_request[j].buf_index = indexUsed;
5913                         break;
5914                     }
5915                 }
5916                 if (j == streamsArray.num_streams) {
5917                     LOGE("Did not find matching stream to update index");
5918                     assert(0);
5919                 }
5920 
5921             } else {
5922                 LOGE("Internal requests not supported on this stream type");
5923                 assert(0);
5924                 pthread_mutex_unlock(&mMutex);
5925                 return INVALID_OPERATION;
5926             }
5927             latestRequest->internalRequestList.push_back(requestedStream);
5928         }
5929 
5930         //If 2 streams have need_metadata set to true, fail the request, unless
5931         //we copy/reference count the metadata buffer
5932         if (streams_need_metadata > 1) {
5933             LOGE("not supporting request in which two streams requires"
5934                     " 2 HAL metadata for reprocessing");
5935             pthread_mutex_unlock(&mMutex);
5936             return -EINVAL;
5937         }
5938 
5939         cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5940                 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5941         if (depthRequestPresent && mDepthChannel) {
5942             if (request->settings) {
5943                 camera_metadata_ro_entry entry;
5944                 if (find_camera_metadata_ro_entry(request->settings,
5945                         NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5946                     if (entry.data.u8[0]) {
5947                         pdafEnable = CAM_PD_DATA_ENABLED;
5948                     } else {
5949                         pdafEnable = CAM_PD_DATA_SKIP;
5950                     }
5951                     mDepthCloudMode = pdafEnable;
5952                 } else {
5953                     pdafEnable = mDepthCloudMode;
5954                 }
5955             } else {
5956                 pdafEnable = mDepthCloudMode;
5957             }
5958         }
5959 
5960         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5961                 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5962             LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5963             pthread_mutex_unlock(&mMutex);
5964             return BAD_VALUE;
5965         }
5966 
5967         if (request->input_buffer == NULL) {
5968             /* Set the parameters to backend:
5969              * - For every request in NORMAL MODE
5970              * - For every request in HFR mode during preview only case
5971              * - Once every batch in HFR mode during video recording
5972              */
5973             if (!mBatchSize ||
5974                (mBatchSize && !isVidBufRequested) ||
5975                (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5976                 LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5977                          mBatchSize, isVidBufRequested,
5978                         mToBeQueuedVidBufs);
5979 
5980                 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5981                     for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5982                         uint32_t m = 0;
5983                         for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5984                             if (streamsArray.stream_request[k].streamID ==
5985                                     mBatchedStreamsArray.stream_request[m].streamID)
5986                                 break;
5987                             }
5988                             if (m == mBatchedStreamsArray.num_streams) {
5989                                 mBatchedStreamsArray.stream_request\
5990                                     [mBatchedStreamsArray.num_streams].streamID =
5991                                     streamsArray.stream_request[k].streamID;
5992                                 mBatchedStreamsArray.stream_request\
5993                                     [mBatchedStreamsArray.num_streams].buf_index =
5994                                     streamsArray.stream_request[k].buf_index;
5995                                 mBatchedStreamsArray.num_streams =
5996                                     mBatchedStreamsArray.num_streams + 1;
5997                             }
5998                     }
5999                     streamsArray = mBatchedStreamsArray;
6000                 }
6001                 /* Update stream id of all the requested buffers */
6002                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
6003                         streamsArray)) {
6004                     LOGE("Failed to set stream type mask in the parameters");
6005                     pthread_mutex_unlock(&mMutex);
6006                     return BAD_VALUE;
6007                 }
6008 
6009                 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
6010                         mParameters);
6011                 if (rc < 0) {
6012                     LOGE("set_parms failed");
6013                 }
6014                 /* reset to zero because the batch has been queued */
6015                 mToBeQueuedVidBufs = 0;
6016                 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
6017                 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
6018             } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
6019                 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
6020                     uint32_t m = 0;
6021                     for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
6022                         if (streamsArray.stream_request[k].streamID ==
6023                                 mBatchedStreamsArray.stream_request[m].streamID)
6024                             break;
6025                     }
6026                     if (m == mBatchedStreamsArray.num_streams) {
6027                         mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
6028                                 streamID = streamsArray.stream_request[k].streamID;
6029                         mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
6030                                 buf_index = streamsArray.stream_request[k].buf_index;
6031                         mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
6032                     }
6033                 }
6034             }
6035             mPendingLiveRequest++;
6036 
6037             // Start all streams after the first setting is sent, so that the
6038             // setting can be applied sooner: (0 + apply_delay)th frame.
6039             if (mState == CONFIGURED && mChannelHandle) {
6040                 //Then start them.
6041                 LOGH("Start META Channel");
6042                 rc = mMetadataChannel->start();
6043                 if (rc < 0) {
6044                     LOGE("META channel start failed");
6045                     pthread_mutex_unlock(&mMutex);
6046                     return rc;
6047                 }
6048 
6049                 if (mAnalysisChannel) {
6050                     rc = mAnalysisChannel->start();
6051                     if (rc < 0) {
6052                         LOGE("Analysis channel start failed");
6053                         mMetadataChannel->stop();
6054                         pthread_mutex_unlock(&mMutex);
6055                         return rc;
6056                     }
6057                 }
6058 
6059                 if (mSupportChannel) {
6060                     rc = mSupportChannel->start();
6061                     if (rc < 0) {
6062                         LOGE("Support channel start failed");
6063                         mMetadataChannel->stop();
6064                         /* Although support and analysis are mutually exclusive today
6065                            adding it in any case for future proofing */
6066                         if (mAnalysisChannel) {
6067                             mAnalysisChannel->stop();
6068                         }
6069                         pthread_mutex_unlock(&mMutex);
6070                         return rc;
6071                     }
6072                 }
6073                 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6074                         it != mStreamInfo.end(); it++) {
6075                     QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
6076                     LOGH("Start Processing Channel mask=%d",
6077                             channel->getStreamTypeMask());
6078                     rc = channel->start();
6079                     if (rc < 0) {
6080                         LOGE("channel start failed");
6081                         pthread_mutex_unlock(&mMutex);
6082                         return rc;
6083                     }
6084                 }
6085 
6086                 if (mRawDumpChannel) {
6087                     LOGD("Starting raw dump stream");
6088                     rc = mRawDumpChannel->start();
6089                     if (rc != NO_ERROR) {
6090                         LOGE("Error Starting Raw Dump Channel");
6091                         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6092                               it != mStreamInfo.end(); it++) {
6093                             QCamera3Channel *channel =
6094                                 (QCamera3Channel *)(*it)->stream->priv;
6095                             LOGH("Stopping Processing Channel mask=%d",
6096                                 channel->getStreamTypeMask());
6097                             channel->stop();
6098                         }
6099                         if (mSupportChannel)
6100                             mSupportChannel->stop();
6101                         if (mAnalysisChannel) {
6102                             mAnalysisChannel->stop();
6103                         }
6104                         mMetadataChannel->stop();
6105                         pthread_mutex_unlock(&mMutex);
6106                         return rc;
6107                     }
6108                 }
6109 
6110                 // Configure modules for stream on.
6111                 rc = startChannelLocked();
6112                 if (rc != NO_ERROR) {
6113                     LOGE("startChannelLocked failed %d", rc);
6114                     pthread_mutex_unlock(&mMutex);
6115                     return rc;
6116                 }
6117             }
6118         }
6119     }
6120 
6121     // Enable HDR+ mode for the first PREVIEW_INTENT request that doesn't disable HDR+.
6122     {
6123         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6124         if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
6125                 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6126                 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6127                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6128                 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW &&
6129                 meta.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
6130                 meta.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 0) {
6131 
6132             if (isSessionHdrPlusModeCompatible()) {
6133                 rc = enableHdrPlusModeLocked();
6134                 if (rc != OK) {
6135                     LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6136                     pthread_mutex_unlock(&mMutex);
6137                     return rc;
6138                 }
6139             }
6140 
6141             mFirstPreviewIntentSeen = true;
6142         }
6143     }
6144 
6145     LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6146 
6147     mState = STARTED;
6148     // Added a timed condition wait
6149     struct timespec ts;
6150     uint8_t isValidTimeout = 1;
6151     rc = clock_gettime(CLOCK_MONOTONIC, &ts);
6152     if (rc < 0) {
6153       isValidTimeout = 0;
6154       LOGE("Error reading the real time clock!!");
6155     }
6156     else {
6157       // Make timeout as 5 sec for request to be honored
6158       int64_t timeout = 5;
6159       {
6160           Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6161           // If there is a pending HDR+ request, the following requests may be blocked until the
6162           // HDR+ request is done. So allow a longer timeout.
6163           if (mHdrPlusPendingRequests.size() > 0) {
6164               timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6165           }
6166       }
6167       ts.tv_sec += timeout;
6168     }
6169     //Block on conditional variable
6170     while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
6171             (mState != ERROR) && (mState != DEINIT)) {
6172         if (!isValidTimeout) {
6173             LOGD("Blocking on conditional wait");
6174             pthread_cond_wait(&mRequestCond, &mMutex);
6175         }
6176         else {
6177             LOGD("Blocking on timed conditional wait");
6178             rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6179             if (rc == ETIMEDOUT) {
6180                 rc = -ENODEV;
6181                 LOGE("Unblocked on timeout!!!!");
6182                 break;
6183             }
6184         }
6185         LOGD("Unblocked");
6186         if (mWokenUpByDaemon) {
6187             mWokenUpByDaemon = false;
6188             if (mPendingLiveRequest < mMaxInFlightRequests)
6189                 break;
6190         }
6191     }
6192     pthread_mutex_unlock(&mMutex);
6193 
6194     return rc;
6195 }
6196 
/*===========================================================================
 * FUNCTION   : startChannelLocked
 *
 * DESCRIPTION: Performs the stream-on sequence for the channel bundle in
 *              three ordered steps: (1) start the backend channel with
 *              sensor streaming deferred, (2) if the Easel manager client is
 *              open, query the now-selected sensor mode and start Easel MIPI
 *              at that mode's pixel clock, (3) start sensor streaming.
 *              NOTE(review): the "Locked" suffix suggests the caller must
 *              hold mMutex — confirm at call sites.
 *
 * PARAMETERS : None
 *
 * RETURN     : 0 on success; error code from the failing step otherwise
 *==========================================================================*/
int32_t QCamera3HardwareInterface::startChannelLocked()
{
    // Configure modules for stream on. Sensor streaming is deliberately
    // deferred (/*start_sensor_streaming*/false) so Easel MIPI can be
    // configured with the selected sensor mode before the sensor runs.
    int32_t rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
            mChannelHandle, /*start_sensor_streaming*/false);
    if (rc != NO_ERROR) {
        LOGE("start_channel failed %d", rc);
        return rc;
    }

    {
        // Configure Easel for stream on.
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (EaselManagerClientOpened) {
            // Now that sensor mode should have been selected, get the selected sensor mode
            // info.
            memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
            rc = getCurrentSensorModeInfo(mSensorModeInfo);
            if (rc != NO_ERROR) {
                ALOGE("%s: Get current sensor mode failed, bail out: %s (%d).", __FUNCTION__,
                        strerror(-rc), rc);
                return rc;
            }
            logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
            // Start MIPI at the rate required by the selected sensor mode.
            rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
                    /*enableCapture*/true);
            if (rc != OK) {
                ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
                        mCameraId, mSensorModeInfo.op_pixel_clk);
                return rc;
            }
            logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
            // Recorded so stopChannelLocked() knows MIPI must be stopped.
            mEaselMipiStarted = true;
        }
    }

    // Start sensor streaming.
    rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
            mChannelHandle);
    if (rc != NO_ERROR) {
        LOGE("start_sensor_stream_on failed %d", rc);
        return rc;
    }

    return 0;
}
6243 
/*===========================================================================
 * FUNCTION   : stopChannelLocked
 *
 * DESCRIPTION: Stops the backend channel bundle and, if Easel MIPI was
 *              started by startChannelLocked() (mEaselMipiStarted), stops
 *              MIPI as well. NOTE(review): the "Locked" suffix suggests the
 *              caller must hold mMutex — confirm at call sites.
 *
 * PARAMETERS :
 *   @stopChannelImmediately : forwarded to stop_channel; stop immediately
 *                             rather than waiting for a frame boundary
 *
 * RETURN     : None (a stopMipi() failure is only logged)
 *==========================================================================*/
void QCamera3HardwareInterface::stopChannelLocked(bool stopChannelImmediately)
{
    mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
            mChannelHandle, stopChannelImmediately);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (EaselManagerClientOpened && mEaselMipiStarted) {
            int32_t rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
            // Best-effort: MIPI is marked stopped even if stopMipi() failed,
            // so a later stop attempt is not retried.
            mEaselMipiStarted = false;
        }
    }
}
6260 
/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION: Writes HAL3 debug state (pending request list, pending
 *              buffer map, and pending frame drop list) to the given file
 *              descriptor, and flags a debug-level update.
 *
 * PARAMETERS :
 *   @fd : file descriptor to write the dump to
 *
 * RETURN     : None
 *==========================================================================*/
dump(int fd)6271 void QCamera3HardwareInterface::dump(int fd)
6272 {
6273     pthread_mutex_lock(&mMutex);
6274     dprintf(fd, "\n Camera HAL3 information Begin \n");
6275 
6276     dprintf(fd, "\nNumber of pending requests: %zu \n",
6277         mPendingRequestsList.size());
6278     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6279     dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
6280     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6281     for(pendingRequestIterator i = mPendingRequestsList.begin();
6282             i != mPendingRequestsList.end(); i++) {
6283         dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6284         i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6285         i->input_buffer);
6286     }
6287     dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6288                 mPendingBuffersMap.get_num_overall_buffers());
6289     dprintf(fd, "-------+------------------\n");
6290     dprintf(fd, " Frame | Stream type mask \n");
6291     dprintf(fd, "-------+------------------\n");
6292     for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6293         for(auto &j : req.mPendingBufferList) {
6294             QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6295             dprintf(fd, " %5d | %11d \n",
6296                     req.frame_number, channel->getStreamTypeMask());
6297         }
6298     }
6299     dprintf(fd, "-------+------------------\n");
6300 
6301     dprintf(fd, "\nPending frame drop list: %zu\n",
6302         mPendingFrameDropList.size());
6303     dprintf(fd, "-------+-----------\n");
6304     dprintf(fd, " Frame | Stream ID \n");
6305     dprintf(fd, "-------+-----------\n");
6306     for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6307         i != mPendingFrameDropList.end(); i++) {
6308         dprintf(fd, " %5d | %9d \n",
6309             i->frame_number, i->stream_ID);
6310     }
6311     dprintf(fd, "-------+-----------\n");
6312 
6313     dprintf(fd, "\n Camera HAL3 information End \n");
6314 
6315     /* use dumpsys media.camera as trigger to send update debug level event */
6316     mUpdateDebugLevel = true;
6317     pthread_mutex_unlock(&mMutex);
6318     return;
6319 }
6320 
6321 /*===========================================================================
6322  * FUNCTION   : flush
6323  *
6324  * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6325  *              conditionally restarts channels
6326  *
6327  * PARAMETERS :
6328  *  @ restartChannels: re-start all channels
6329  *  @ stopChannelImmediately: stop the channel immediately. This should be used
6330  *                            when device encountered an error and MIPI may has
6331  *                            been stopped.
6332  *
6333  * RETURN     :
6334  *          0 on success
6335  *          Error code on failure
6336  *==========================================================================*/
flush(bool restartChannels,bool stopChannelImmediately)6337 int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
6338 {
6339     KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
6340     int32_t rc = NO_ERROR;
6341 
6342     LOGD("Unblocking Process Capture Request");
6343     pthread_mutex_lock(&mMutex);
6344     mFlush = true;
6345     pthread_mutex_unlock(&mMutex);
6346 
6347     // Disable HDR+ if it's enabled;
6348     {
6349         std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6350         finishHdrPlusClientOpeningLocked(l);
6351         disableHdrPlusModeLocked();
6352     }
6353 
6354     rc = stopAllChannels();
6355     // unlink of dualcam
6356     if (mIsDeviceLinked) {
6357         cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6358                 &m_pDualCamCmdPtr->bundle_info;
6359         m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
6360         m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6361         pthread_mutex_lock(&gCamLock);
6362 
6363         if (mIsMainCamera == 1) {
6364             m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6365             m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
6366             m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
6367             // related session id should be session id of linked session
6368             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6369         } else {
6370             m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6371             m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
6372             m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
6373             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6374         }
6375         m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
6376         pthread_mutex_unlock(&gCamLock);
6377 
6378         rc = mCameraHandle->ops->set_dual_cam_cmd(
6379                 mCameraHandle->camera_handle);
6380         if (rc < 0) {
6381             LOGE("Dualcam: Unlink failed, but still proceed to close");
6382         }
6383     }
6384 
6385     if (rc < 0) {
6386         LOGE("stopAllChannels failed");
6387         return rc;
6388     }
6389     if (mChannelHandle) {
6390         stopChannelLocked(stopChannelImmediately);
6391     }
6392 
6393     // Reset bundle info
6394     rc = setBundleInfo();
6395     if (rc < 0) {
6396         LOGE("setBundleInfo failed %d", rc);
6397         return rc;
6398     }
6399 
6400     // Mutex Lock
6401     pthread_mutex_lock(&mMutex);
6402 
6403     // Unblock process_capture_request
6404     mPendingLiveRequest = 0;
6405     pthread_cond_signal(&mRequestCond);
6406 
6407     rc = notifyErrorForPendingRequests();
6408     if (rc < 0) {
6409         LOGE("notifyErrorForPendingRequests failed");
6410         pthread_mutex_unlock(&mMutex);
6411         return rc;
6412     }
6413 
6414     mFlush = false;
6415 
6416     // Start the Streams/Channels
6417     if (restartChannels) {
6418         rc = startAllChannels();
6419         if (rc < 0) {
6420             LOGE("startAllChannels failed");
6421             pthread_mutex_unlock(&mMutex);
6422             return rc;
6423         }
6424         if (mChannelHandle) {
6425             // Configure modules for stream on.
6426             rc = startChannelLocked();
6427             if (rc < 0) {
6428                 LOGE("startChannelLocked failed");
6429                 pthread_mutex_unlock(&mMutex);
6430                 return rc;
6431             }
6432         }
6433     }
6434     pthread_mutex_unlock(&mMutex);
6435 
6436     return 0;
6437 }
6438 
6439 /*===========================================================================
6440  * FUNCTION   : flushPerf
6441  *
6442  * DESCRIPTION: This is the performance optimization version of flush that does
6443  *              not use stream off, rather flushes the system
6444  *
6445  * PARAMETERS :
6446  *
6447  *
6448  * RETURN     : 0 : success
6449  *              -EINVAL: input is malformed (device is not valid)
6450  *              -ENODEV: if the device has encountered a serious error
6451  *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    // Snapshot of outstanding buffers; the wait loop below blocks until
    // this count drains to zero (decremented elsewhere on buffer return).
    mPendingBuffersMap.numPendingBufsAtFlush =
        mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
        mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    // Nothing outstanding — flush is trivially complete.
    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // Set up an absolute CLOCK_MONOTONIC deadline of FLUSH_TIMEOUT seconds;
    // if the clock read fails, fall back to an untimed wait.
    rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
    if (rc < 0) {
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    // mMutex is released while waiting and re-acquired on wakeup; any
    // wait error (including ETIMEDOUT) breaks out with rc != 0.
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                 LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                 break;
            }
        } else {
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                            strerror(rc));
                break;
            }
        }
    }
    // A failed/timed-out wait is surfaced to the framework as a device error.
    if (rc != 0) {
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
               LOGE("Flushing the channels failed with error %d", rc);
               // even though the channel flush failed we need to continue and
               // return the buffers we have to the framework, however the return
               // value will be an error
               rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
6552 
6553 /*===========================================================================
6554  * FUNCTION   : handleCameraDeviceError
6555  *
6556  * DESCRIPTION: This function calls internal flush and notifies the error to
6557  *              framework and updates the state variable.
6558  *
6559  * PARAMETERS :
6560  *   @stopChannelImmediately : stop channels immediately without waiting for
6561  *                             frame boundary.
6562  *
6563  * RETURN     : NO_ERROR on Success
6564  *              Error code on failure
6565  *==========================================================================*/
handleCameraDeviceError(bool stopChannelImmediately)6566 int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
6567 {
6568     int32_t rc = NO_ERROR;
6569 
6570     {
6571         Mutex::Autolock lock(mFlushLock);
6572         pthread_mutex_lock(&mMutex);
6573         if (mState != ERROR) {
6574             //if mState != ERROR, nothing to be done
6575             pthread_mutex_unlock(&mMutex);
6576             return NO_ERROR;
6577         }
6578         pthread_mutex_unlock(&mMutex);
6579 
6580         rc = flush(false /* restart channels */, stopChannelImmediately);
6581         if (NO_ERROR != rc) {
6582             LOGE("internal flush to handle mState = ERROR failed");
6583         }
6584 
6585         pthread_mutex_lock(&mMutex);
6586         mState = DEINIT;
6587         pthread_mutex_unlock(&mMutex);
6588     }
6589 
6590     camera3_notify_msg_t notify_msg;
6591     memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6592     notify_msg.type = CAMERA3_MSG_ERROR;
6593     notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6594     notify_msg.message.error.error_stream = NULL;
6595     notify_msg.message.error.frame_number = 0;
6596     orchestrateNotify(&notify_msg);
6597 
6598     return rc;
6599 }
6600 
6601 /*===========================================================================
6602  * FUNCTION   : captureResultCb
6603  *
6604  * DESCRIPTION: Callback handler for all capture result
6605  *              (streams, as well as metadata)
6606  *
6607  * PARAMETERS :
6608  *   @metadata : metadata information
6609  *   @buffer   : actual gralloc buffer to be returned to frameworks.
6610  *               NULL if metadata.
6611  *
6612  * RETURN     : NONE
6613  *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata_buf,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer)6614 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6615                 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6616 {
6617     if (metadata_buf) {
6618         pthread_mutex_lock(&mMutex);
6619         uint8_t batchSize = mBatchSize;
6620         pthread_mutex_unlock(&mMutex);
6621         if (batchSize) {
6622             handleBatchMetadata(metadata_buf,
6623                     true /* free_and_bufdone_meta_buf */);
6624         } else { /* mBatchSize = 0 */
6625             hdrPlusPerfLock(metadata_buf);
6626             pthread_mutex_lock(&mMutex);
6627             handleMetadataWithLock(metadata_buf,
6628                     true /* free_and_bufdone_meta_buf */,
6629                     true /* last urgent frame of batch metadata */,
6630                     true /* last frame of batch metadata */,
6631                     NULL);
6632             pthread_mutex_unlock(&mMutex);
6633         }
6634     } else if (isInputBuffer) {
6635         pthread_mutex_lock(&mMutex);
6636         handleInputBufferWithLock(frame_number);
6637         pthread_mutex_unlock(&mMutex);
6638     } else {
6639         pthread_mutex_lock(&mMutex);
6640         handleBufferWithLock(buffer, frame_number);
6641         pthread_mutex_unlock(&mMutex);
6642     }
6643     return;
6644 }
6645 
6646 /*===========================================================================
6647  * FUNCTION   : getReprocessibleOutputStreamId
6648  *
6649  * DESCRIPTION: Get source output stream id for the input reprocess stream
6650  *              based on size and format, which would be the largest
6651  *              output stream if an input stream exists.
6652  *
6653  * PARAMETERS :
6654  *   @id      : return the stream id if found
6655  *
6656  * RETURN     : int32_t type of status
6657  *              NO_ERROR  -- success
6658  *              none-zero failure code
6659  *==========================================================================*/
getReprocessibleOutputStreamId(uint32_t & id)6660 int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6661 {
6662     /* check if any output or bidirectional stream with the same size and format
6663        and return that stream */
6664     if ((mInputStreamInfo.dim.width > 0) &&
6665             (mInputStreamInfo.dim.height > 0)) {
6666         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6667                 it != mStreamInfo.end(); it++) {
6668 
6669             camera3_stream_t *stream = (*it)->stream;
6670             if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6671                     (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6672                     (stream->format == mInputStreamInfo.format)) {
6673                 // Usage flag for an input stream and the source output stream
6674                 // may be different.
6675                 LOGD("Found reprocessible output stream! %p", *it);
6676                 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6677                          stream->usage, mInputStreamInfo.usage);
6678 
6679                 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6680                 if (channel != NULL && channel->mStreams[0]) {
6681                     id = channel->mStreams[0]->getMyServerID();
6682                     return NO_ERROR;
6683                 }
6684             }
6685         }
6686     } else {
6687         LOGD("No input stream, so no reprocessible output stream");
6688     }
6689     return NAME_NOT_FOUND;
6690 }
6691 
6692 /*===========================================================================
6693  * FUNCTION   : lookupFwkName
6694  *
6695  * DESCRIPTION: In case the enum is not same in fwk and backend
6696  *              make sure the parameter is correctly propogated
6697  *
6698  * PARAMETERS  :
6699  *   @arr      : map between the two enums
6700  *   @len      : len of the map
6701  *   @hal_name : name of the hal_parm to map
6702  *
6703  * RETURN     : int type of status
6704  *              fwk_name  -- success
6705  *              none-zero failure code
6706  *==========================================================================*/
lookupFwkName(const mapType * arr,size_t len,halType hal_name)6707 template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6708         size_t len, halType hal_name)
6709 {
6710 
6711     for (size_t i = 0; i < len; i++) {
6712         if (arr[i].hal_name == hal_name) {
6713             return arr[i].fwk_name;
6714         }
6715     }
6716 
6717     /* Not able to find matching framework type is not necessarily
6718      * an error case. This happens when mm-camera supports more attributes
6719      * than the frameworks do */
6720     LOGH("Cannot find matching framework type");
6721     return NAME_NOT_FOUND;
6722 }
6723 
6724 /*===========================================================================
6725  * FUNCTION   : lookupHalName
6726  *
6727  * DESCRIPTION: In case the enum is not same in fwk and backend
6728  *              make sure the parameter is correctly propogated
6729  *
6730  * PARAMETERS  :
6731  *   @arr      : map between the two enums
6732  *   @len      : len of the map
6733  *   @fwk_name : name of the hal_parm to map
6734  *
6735  * RETURN     : int32_t type of status
6736  *              hal_name  -- success
6737  *              none-zero failure code
6738  *==========================================================================*/
lookupHalName(const mapType * arr,size_t len,fwkType fwk_name)6739 template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6740         size_t len, fwkType fwk_name)
6741 {
6742     for (size_t i = 0; i < len; i++) {
6743         if (arr[i].fwk_name == fwk_name) {
6744             return arr[i].hal_name;
6745         }
6746     }
6747 
6748     LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6749     return NAME_NOT_FOUND;
6750 }
6751 
6752 /*===========================================================================
6753  * FUNCTION   : lookupProp
6754  *
6755  * DESCRIPTION: lookup a value by its name
6756  *
6757  * PARAMETERS :
6758  *   @arr     : map between the two enums
6759  *   @len     : size of the map
6760  *   @name    : name to be looked up
6761  *
6762  * RETURN     : Value if found
6763  *              CAM_CDS_MODE_MAX if not found
6764  *==========================================================================*/
lookupProp(const mapType * arr,size_t len,const char * name)6765 template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6766         size_t len, const char *name)
6767 {
6768     if (name) {
6769         for (size_t i = 0; i < len; i++) {
6770             if (!strcmp(arr[i].desc, name)) {
6771                 return arr[i].val;
6772             }
6773         }
6774     }
6775     return CAM_CDS_MODE_MAX;
6776 }
6777 
/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate metadata reported by the HAL backend into the
 *              framework camera_metadata_t representation.
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *   @pendingRequest: pending request for this metadata
 *   @pprocDone: whether internal offline postprocessing is done
 *   @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
 *                         in a batch. Always true for non-batch mode.
 *   @enableZsl: whether ZSL is enabled for this request (may be null)
 *
 * RETURN     : camera_metadata_t*
 *              metadata in a format specified by fwk
 *==========================================================================*/
6791  *==========================================================================*/
6792 camera_metadata_t*
translateFromHalMetadata(metadata_buffer_t * metadata,const PendingRequestInfo & pendingRequest,bool pprocDone,bool lastMetadataInBatch,const bool * enableZsl)6793 QCamera3HardwareInterface::translateFromHalMetadata(
6794                                  metadata_buffer_t *metadata,
6795                                  const PendingRequestInfo& pendingRequest,
6796                                  bool pprocDone,
6797                                  bool lastMetadataInBatch,
6798                                  const bool *enableZsl)
6799 {
6800     CameraMetadata camMetadata;
6801     camera_metadata_t *resultMetadata;
6802 
6803     if (!lastMetadataInBatch) {
6804         /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6805          * Timestamp is needed because it's used for shutter notify calculation.
6806          * */
6807         camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6808         resultMetadata = camMetadata.release();
6809         return resultMetadata;
6810     }
6811 
6812     if (pendingRequest.jpegMetadata.entryCount())
6813         camMetadata.append(pendingRequest.jpegMetadata);
6814 
6815     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6816     camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6817     camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6818     camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6819     camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
6820     if (mBatchSize == 0) {
6821         // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6822         camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
6823     }
6824 
6825     // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6826     // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
6827     if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
6828         // DevCamDebug metadata translateFromHalMetadata AF
6829         IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6830                 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6831             int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6832             camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6833         }
6834         IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6835                 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
6836             int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6837             camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6838         }
6839         IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6840                 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
6841             int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6842             camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6843         }
6844         IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6845                 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6846             int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6847             camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6848         }
6849         IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6850                 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6851             int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6852             camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6853         }
6854         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6855                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6856             int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6857                 *DevCamDebug_af_monitor_pdaf_target_pos;
6858             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6859                 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6860         }
6861         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6862                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6863             int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6864                 *DevCamDebug_af_monitor_pdaf_confidence;
6865             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6866                 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6867         }
6868         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6869                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6870             int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6871             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6872                 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6873         }
6874         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6875                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6876             int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6877                 *DevCamDebug_af_monitor_tof_target_pos;
6878             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6879                 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6880         }
6881         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6882                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6883             int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6884                 *DevCamDebug_af_monitor_tof_confidence;
6885             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6886                 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6887         }
6888         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6889                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6890             int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6891             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6892                 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6893         }
6894         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6895                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6896             int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6897             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6898                 &fwk_DevCamDebug_af_monitor_type_select, 1);
6899         }
6900         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6901                 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6902             int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6903             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6904                 &fwk_DevCamDebug_af_monitor_refocus, 1);
6905         }
6906         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6907                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6908             int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6909             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6910                 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6911         }
6912         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6913                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6914             int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6915                 *DevCamDebug_af_search_pdaf_target_pos;
6916             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6917                 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6918         }
6919         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6920                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6921             int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6922             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6923                 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6924         }
6925         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6926                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6927             int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6928             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6929                 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6930         }
6931         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6932                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6933             int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6934             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6935                 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6936         }
6937         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6938                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6939             int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6940             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6941                 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6942         }
6943         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6944                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6945             int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6946                 *DevCamDebug_af_search_tof_target_pos;
6947             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6948                 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6949         }
6950         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6951                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6952             int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6953             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6954                 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6955         }
6956         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6957                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6958             int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6959             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6960                 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6961         }
6962         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6963                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6964             int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6965             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6966                 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6967         }
6968         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6969                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6970             int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6971             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6972                 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6973         }
6974         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6975                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6976             int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6977             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6978                 &fwk_DevCamDebug_af_search_type_select, 1);
6979         }
6980         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6981                 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6982             int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6983             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6984                 &fwk_DevCamDebug_af_search_next_pos, 1);
6985         }
6986         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6987                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6988             int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6989             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6990                 &fwk_DevCamDebug_af_search_target_pos, 1);
6991         }
6992         // DevCamDebug metadata translateFromHalMetadata AEC
6993         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6994                 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6995             int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6996             camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6997     }
6998         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6999                 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
7000             int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
7001             camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
7002         }
7003         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
7004                 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
7005             int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
7006             camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
7007         }
7008         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
7009                 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
7010             int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
7011             camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
7012         }
7013         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
7014                 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
7015             int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
7016             camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
7017         }
7018         IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
7019                 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
7020             float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
7021             camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
7022         }
7023         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
7024                 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
7025             int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
7026             camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
7027         }
7028         IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
7029                 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
7030             float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
7031             camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
7032         }
7033         // DevCamDebug metadata translateFromHalMetadata zzHDR
7034         IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
7035                 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
7036             float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
7037             camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
7038         }
7039         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
7040                 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
7041             int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
7042             camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
7043         }
7044         IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
7045                 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
7046             float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
7047             camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
7048         }
7049         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
7050                 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
7051             int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
7052             camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
7053         }
7054         IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
7055                 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
7056             float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
7057                 *DevCamDebug_aec_hdr_sensitivity_ratio;
7058             camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
7059                                &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
7060         }
7061         IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
7062                 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
7063             float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
7064             camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
7065                                &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
7066         }
7067         // DevCamDebug metadata translateFromHalMetadata ADRC
7068         IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
7069                 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
7070             float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
7071             camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
7072                                &fwk_DevCamDebug_aec_total_drc_gain, 1);
7073         }
7074         IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
7075                 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
7076             float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
7077             camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
7078                                &fwk_DevCamDebug_aec_color_drc_gain, 1);
7079         }
7080         IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
7081                 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
7082             float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
7083             camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
7084         }
7085         IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
7086                 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
7087             float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
7088             camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
7089         }
7090         IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
7091                 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
7092             float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
7093             camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
7094         }
7095         IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
7096                 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
7097             float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
7098             camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
7099         }
7100         // DevCamDebug metadata translateFromHalMetadata AEC MOTION
7101         IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
7102                 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
7103             float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
7104             camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
7105                                &fwk_DevCamDebug_aec_camera_motion_dx, 1);
7106         }
7107         IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
7108                 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
7109             float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
7110             camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
7111                                &fwk_DevCamDebug_aec_camera_motion_dy, 1);
7112         }
7113         IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
7114                 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
7115             float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
7116             camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
7117                                &fwk_DevCamDebug_aec_subject_motion, 1);
7118         }
7119         // DevCamDebug metadata translateFromHalMetadata AWB
7120         IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
7121                 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
7122             float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
7123             camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
7124         }
7125         IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7126                 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7127             float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7128             camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7129         }
7130         IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7131                 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7132             float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7133             camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7134         }
7135         IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7136                 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7137             int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7138             camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7139         }
7140         IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7141                 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7142             int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7143             camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7144         }
7145     }
7146     // atrace_end(ATRACE_TAG_ALWAYS);
7147 
7148     IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7149         int64_t fwk_frame_number = *frame_number;
7150         camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7151     }
7152 
7153     IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7154         int32_t fps_range[2];
7155         fps_range[0] = (int32_t)float_range->min_fps;
7156         fps_range[1] = (int32_t)float_range->max_fps;
7157         camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7158                                       fps_range, 2);
7159         LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7160              fps_range[0], fps_range[1]);
7161     }
7162 
7163     IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7164         camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7165     }
7166 
7167     IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7168         int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7169                 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7170                 *sceneMode);
7171         if (NAME_NOT_FOUND != val) {
7172             uint8_t fwkSceneMode = (uint8_t)val;
7173             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7174             LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7175                      fwkSceneMode);
7176         }
7177     }
7178 
7179     IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7180         uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7181         camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7182     }
7183 
7184     IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7185         uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7186         camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7187     }
7188 
7189     IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7190         uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7191         camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7192     }
7193 
7194     IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7195             CAM_INTF_META_EDGE_MODE, metadata) {
7196         camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7197     }
7198 
7199     IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7200         uint8_t fwk_flashPower = (uint8_t) *flashPower;
7201         camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7202     }
7203 
7204     IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7205         camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7206     }
7207 
7208     IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7209         if (0 <= *flashState) {
7210             uint8_t fwk_flashState = (uint8_t) *flashState;
7211             if (!gCamCapability[mCameraId]->flash_available) {
7212                 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7213             }
7214             camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7215         }
7216     }
7217 
7218     IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7219         int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7220         if (NAME_NOT_FOUND != val) {
7221             uint8_t fwk_flashMode = (uint8_t)val;
7222             camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7223         }
7224     }
7225 
7226     IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7227         uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7228         camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7229     }
7230 
7231     IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7232         camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7233     }
7234 
7235     IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7236         camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7237     }
7238 
7239     IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7240         camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7241     }
7242 
7243     IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7244         uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7245         camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7246     }
7247 
7248     IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7249         uint8_t fwk_videoStab = (uint8_t) *videoStab;
7250         LOGD("fwk_videoStab = %d", fwk_videoStab);
7251         camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7252     } else {
7253         // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
7254         // and so hardcoding the Video Stab result to OFF mode.
7255         uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7256         camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
7257         LOGD("EIS result default to OFF mode");
7258     }
7259 
    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
        // Narrow HAL uint32_t mode to the framework's uint8_t enum.
        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
    }

    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
    }

    // Per-frame (dynamic) black level. The HAL value is first reordered to
    // match this sensor's CFA color arrangement, published via the vendor
    // tag, and then (HAL > 3.3) rescaled for the standard framework tag.
    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
        float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];

        // Reorder the 4 black-level samples according to the sensor's color
        // filter arrangement (RGGB/GRBG/...).
        adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
              gCamCapability[mCameraId]->color_arrangement);

        LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
          blackLevelAppliedPattern->cam_black_level[0],
          blackLevelAppliedPattern->cam_black_level[1],
          blackLevelAppliedPattern->cam_black_level[2],
          blackLevelAppliedPattern->cam_black_level[3]);
        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
                BLACK_LEVEL_PATTERN_CNT);

#ifndef USE_HAL_3_3
        // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
        // Need convert the internal 14 bit depth to sensor 10 bit sensor raw
        // depth space.
        // (/= 16.0 drops 4 bits of depth; mutates fwk_blackLevelInd in place,
        // which is why the vendor-tag update above must happen first.)
        fwk_blackLevelInd[0] /= 16.0;
        fwk_blackLevelInd[1] /= 16.0;
        fwk_blackLevelInd[2] /= 16.0;
        fwk_blackLevelInd[3] /= 16.0;
        camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
                BLACK_LEVEL_PATTERN_CNT);
#endif
    }
7296 
7297 #ifndef USE_HAL_3_3
7298     // Fixed whitelevel is used by ISP/Sensor
7299     camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7300             &gCamCapability[mCameraId]->white_level, 1);
7301 #endif
7302 
7303     IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7304             CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7305         int32_t scalerCropRegion[4];
7306         scalerCropRegion[0] = hScalerCropRegion->left;
7307         scalerCropRegion[1] = hScalerCropRegion->top;
7308         scalerCropRegion[2] = hScalerCropRegion->width;
7309         scalerCropRegion[3] = hScalerCropRegion->height;
7310 
7311         // Adjust crop region from sensor output coordinate system to active
7312         // array coordinate system.
7313         mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7314                 scalerCropRegion[2], scalerCropRegion[3]);
7315 
7316         camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7317     }
7318 
7319     IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7320         LOGD("sensorExpTime = %lld", *sensorExpTime);
7321         camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7322     }
7323 
7324     IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7325         LOGD("expTimeBoost = %f", *expTimeBoost);
7326         camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7327     }
7328 
7329     IF_META_AVAILABLE(int64_t, sensorFameDuration,
7330             CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7331         LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7332         camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7333     }
7334 
7335     IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7336             CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7337         LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7338         camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7339                 sensorRollingShutterSkew, 1);
7340     }
7341 
7342     IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7343         LOGD("sensorSensitivity = %d", *sensorSensitivity);
7344         camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7345 
7346         //calculate the noise profile based on sensitivity
7347         double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7348         double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7349         double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7350         for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7351             noise_profile[i]   = noise_profile_S;
7352             noise_profile[i+1] = noise_profile_O;
7353         }
7354         LOGD("noise model entry (S, O) is (%f, %f)",
7355                 noise_profile_S, noise_profile_O);
7356         camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7357                 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7358     }
7359 
7360 #ifndef USE_HAL_3_3
7361     int32_t fwk_ispSensitivity = 100;
7362     IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
7363         fwk_ispSensitivity = (int32_t) *ispSensitivity;
7364     }
7365     IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7366         fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7367     }
7368     camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
7369 #endif
7370 
7371     IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7372         uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7373         camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7374     }
7375 
    // Face detection: translate the HAL face-detect mode and, when detection
    // is enabled, the per-face statistics (scores, rectangles, and — in FULL
    // mode — ids and landmarks, plus vendor blink/smile/gaze data).
    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
                *faceDetectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_faceDetectMode = (uint8_t)val;
            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);

            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
                        CAM_INTF_META_FACE_DETECTION, metadata) {
                    // Clamp to MAX_ROI so the fixed-size arrays below cannot
                    // be overrun by a bogus face count from the HAL.
                    uint8_t numFaces = MIN(
                            faceDetectionInfo->num_faces_detected, MAX_ROI);
                    int32_t faceIds[MAX_ROI];
                    uint8_t faceScores[MAX_ROI];
                    int32_t faceRectangles[MAX_ROI * 4];
                    int32_t faceLandmarks[MAX_ROI * 6];
                    // j indexes faceRectangles (4 ints/face); k indexes
                    // faceLandmarks (TOTAL_LANDMARK_INDICES ints/face).
                    size_t j = 0, k = 0;

                    for (size_t i = 0; i < numFaces; i++) {
                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
                        // Adjust crop region from sensor output coordinate system to active
                        // array coordinate system.
                        cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
                                rect.width, rect.height);

                        convertToRegions(rect, faceRectangles+j, -1);

                        LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
                                "bottom-right (%d, %d)",
                                faceDetectionInfo->frame_id, i,
                                faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
                                faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);

                        j+= 4;
                    }
                    // No faces this frame: publish zeroed arrays rather than
                    // uninitialized stack contents (updates below still run).
                    if (numFaces <= 0) {
                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
                    }

                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
                            numFaces);
                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
                            faceRectangles, numFaces * 4U);
                    // Ids and landmarks are only reported in FULL mode, per
                    // the android.statistics.faceDetectMode contract.
                    if (fwk_faceDetectMode ==
                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                        IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
                                CAM_INTF_META_FACE_LANDMARK, metadata) {

                            for (size_t i = 0; i < numFaces; i++) {
                                cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
                                // Map the co-ordinate sensor output coordinate system to active
                                // array coordinate system.
                                mCropRegionMapper.toActiveArray(
                                        face_landmarks.left_eye_center.x,
                                        face_landmarks.left_eye_center.y);
                                mCropRegionMapper.toActiveArray(
                                        face_landmarks.right_eye_center.x,
                                        face_landmarks.right_eye_center.y);
                                mCropRegionMapper.toActiveArray(
                                        face_landmarks.mouth_center.x,
                                        face_landmarks.mouth_center.y);

                                convertLandmarks(face_landmarks, faceLandmarks+k);

                                LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
                                        "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
                                        faceDetectionInfo->frame_id, i,
                                        faceLandmarks[k + LEFT_EYE_X],
                                        faceLandmarks[k + LEFT_EYE_Y],
                                        faceLandmarks[k + RIGHT_EYE_X],
                                        faceLandmarks[k + RIGHT_EYE_Y],
                                        faceLandmarks[k + MOUTH_X],
                                        faceLandmarks[k + MOUTH_Y]);

                                k+= TOTAL_LANDMARK_INDICES;
                            }
                        } else {
                            // Landmark metadata missing: fill with sentinel
                            // "invalid" values so consumers can detect it.
                            for (size_t i = 0; i < numFaces; i++) {
                                setInvalidLandmarks(faceLandmarks+k);
                                k+= TOTAL_LANDMARK_INDICES;
                            }
                        }

                        for (size_t i = 0; i < numFaces; i++) {
                            faceIds[i] = faceDetectionInfo->faces[i].face_id;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
                                    faceDetectionInfo->frame_id, i, faceIds[i]);
                        }

                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
                                faceLandmarks, numFaces * 6U);
                    }
                    // Vendor extensions: blink, smile and gaze statistics are
                    // packed per-face into flat arrays (2 or 3 values/face).
                    IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
                            CAM_INTF_META_FACE_BLINK, metadata) {
                        uint8_t detected[MAX_ROI];
                        uint8_t degree[MAX_ROI * 2];
                        for (size_t i = 0; i < numFaces; i++) {
                            detected[i] = blinks->blink[i].blink_detected;
                            degree[2 * i] = blinks->blink[i].left_blink;
                            degree[2 * i + 1] = blinks->blink[i].right_blink;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
                                    "blink_detected=%d, leye_blink=%d, reye_blink=%d",
                                    faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
                                    degree[2 * i + 1]);
                        }
                        camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
                                detected, numFaces);
                        camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
                                degree, numFaces * 2);
                    }
                    IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
                            CAM_INTF_META_FACE_SMILE, metadata) {
                        uint8_t degree[MAX_ROI];
                        uint8_t confidence[MAX_ROI];
                        for (size_t i = 0; i < numFaces; i++) {
                            degree[i] = smiles->smile[i].smile_degree;
                            confidence[i] = smiles->smile[i].smile_confidence;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
                                    "smile_degree=%d, smile_score=%d",
                                    faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
                        }
                        camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
                                degree, numFaces);
                        camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
                                confidence, numFaces);
                    }
                    IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
                            CAM_INTF_META_FACE_GAZE, metadata) {
                        int8_t angle[MAX_ROI];
                        int32_t direction[MAX_ROI * 3];
                        int8_t degree[MAX_ROI * 2];
                        for (size_t i = 0; i < numFaces; i++) {
                            angle[i] = gazes->gaze[i].gaze_angle;
                            direction[3 * i] = gazes->gaze[i].updown_dir;
                            direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
                            direction[3 * i + 2] = gazes->gaze[i].roll_dir;
                            degree[2 * i] = gazes->gaze[i].left_right_gaze;
                            degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;

                            LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
                                    "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
                                    "left_right_gaze=%d, top_bottom_gaze=%d",
                                    faceDetectionInfo->frame_id, i, angle[i],
                                    direction[3 * i], direction[3 * i + 1],
                                    direction[3 * i + 2],
                                    degree[2 * i], degree[2 * i + 1]);
                        }
                        // Signed arrays are reinterpreted as uint8_t for the
                        // vendor tags; consumers must cast back.
                        camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
                                (uint8_t *)angle, numFaces);
                        camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
                                direction, numFaces * 3);
                        camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
                                (uint8_t *)degree, numFaces * 2);
                    }
                }
            }
        }
    }
7542 
7543     IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7544         uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
7545         int32_t histogramBins = 0;
7546         camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
7547         camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
7548 
7549         IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7550             histogramBins = *histBins;
7551             camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7552         }
7553 
7554         if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
7555             IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7556                 // process histogram statistics info
7557                 int32_t* histogramData = NULL;
7558 
7559                 switch (stats_data->type) {
7560                 case CAM_HISTOGRAM_TYPE_BAYER:
7561                     switch (stats_data->bayer_stats.data_type) {
7562                         case CAM_STATS_CHANNEL_GR:
7563                           histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7564                           break;
7565                         case CAM_STATS_CHANNEL_GB:
7566                           histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7567                           break;
7568                         case CAM_STATS_CHANNEL_B:
7569                           histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7570                           break;
7571                         case CAM_STATS_CHANNEL_Y:
7572                         case CAM_STATS_CHANNEL_ALL:
7573                         case CAM_STATS_CHANNEL_R:
7574                         default:
7575                           histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7576                           break;
7577                     }
7578                     break;
7579                 case CAM_HISTOGRAM_TYPE_YUV:
7580                     histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
7581                     break;
7582                 }
7583 
7584                 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
7585             }
7586         }
7587     }
7588 
    // Sharpness map mode (narrowed to the framework's byte-sized tag).
    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
    }

    // Sharpness map data: always publishes the full fixed-size
    // CAM_MAX_MAP_WIDTH x CAM_MAX_MAP_HEIGHT grid, 3 values per cell.
    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
    }
7600 
    // Lens shading map: clamp the capability-advertised map dimensions to the
    // HAL maximums, then publish the 4-channel (per-CFA-channel gain) map.
    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
                CAM_MAX_SHADING_MAP_HEIGHT);
        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
                CAM_MAX_SHADING_MAP_WIDTH);
        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
                lensShadingMap->lens_shading, 4U * map_width * map_height);
    }

    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
    }

    // Tonemap curves: defensively clamp the point count to the HAL buffer
    // size; each point is an (in, out) pair, hence the "* 2" element counts.
    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
        //Populate CAM_INTF_META_TONEMAP_CURVES
        /* ch0 = G, ch 1 = B, ch 2 = R*/
        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     tonemap->tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }

        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
                        &tonemap->curves[0].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);

        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
                        &tonemap->curves[1].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);

        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
                        &tonemap->curves[2].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);
    }
7638 
    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
                CC_GAIN_MAX);
    }

    // 3x3 color transform, reinterpreted as framework rationals via void* to
    // sidestep strict-aliasing cast warnings.
    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
                CC_MATRIX_COLS * CC_MATRIX_ROWS);
    }

    // Sensor profile tone curve, clamped to the HAL curve buffer (points are
    // (in, out) pairs — hence "* 2" elements).
    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     toneCurve->tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }
        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
                (float*)toneCurve->curve.tonemap_points,
                toneCurve->tonemap_points_cnt * 2);
    }

    // Predicted (pre-capture estimate) color gains/transform.
    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
                predColorCorrectionGains->gains, 4);
    }

    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
                CC_MATRIX_ROWS * CC_MATRIX_COLS);
    }

    // OTP-calibrated green channel split (Gr/Gb imbalance).
    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
    }

    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
    }

    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
    }

    // Effect mode: only published when the HAL value maps to a framework enum.
    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                *effectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_effectMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
        }
    }
7700 
7701     IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7702             CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7703         int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7704                 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7705         if (NAME_NOT_FOUND != fwk_testPatternMode) {
7706             camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7707         }
7708         int32_t fwk_testPatternData[4];
7709         fwk_testPatternData[0] = testPatternData->r;
7710         fwk_testPatternData[3] = testPatternData->b;
7711         switch (gCamCapability[mCameraId]->color_arrangement) {
7712         case CAM_FILTER_ARRANGEMENT_RGGB:
7713         case CAM_FILTER_ARRANGEMENT_GRBG:
7714             fwk_testPatternData[1] = testPatternData->gr;
7715             fwk_testPatternData[2] = testPatternData->gb;
7716             break;
7717         case CAM_FILTER_ARRANGEMENT_GBRG:
7718         case CAM_FILTER_ARRANGEMENT_BGGR:
7719             fwk_testPatternData[2] = testPatternData->gr;
7720             fwk_testPatternData[1] = testPatternData->gb;
7721             break;
7722         default:
7723             LOGE("color arrangement %d is not supported",
7724                 gCamCapability[mCameraId]->color_arrangement);
7725             break;
7726         }
7727         camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7728     }
7729 
    // JPEG/EXIF related metadata: GPS fields, orientation, quality and
    // thumbnail parameters, passed through (with narrowing where the
    // framework tag is byte-sized).
    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    }

    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
        // NOTE(review): assumes the HAL buffer is NUL-terminated — String8
        // reads until the terminator; confirm against the metadata writer.
        String8 str((const char *)gps_methods);
        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    }

    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    }

    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    }

    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    }

    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    }

    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    }
7763 
    // Skip reprocess metadata if there is no input stream.
    if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
        // Opaque per-frame payload forwarded untouched for reprocessing;
        // fixed byte size expressed as an int32 element count.
        IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
            camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
                    privateData,
                    MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
        }
    }

    // AEC algorithm type is exposed via the vendor exposure-meter tag.
    IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
        camMetadata.update(QCAMERA3_EXPOSURE_METER,
                meteringMode, 1);
    }

    // Auto-scene-detect HDR result: flag plus confidence score.
    IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
            CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
        LOGD("hdr_scene_data: %d %f\n",
                hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
        uint8_t isHdr = hdr_scene_data->is_hdr_scene;
        float isHdrConfidence = hdr_scene_data->hdr_confidence;
        camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
                           &isHdr, 1);
        camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
                           &isHdrConfidence, 1);
    }
7789 
7790 
7791 
    // Serialize the HAL tuning parameters into a flat vendor-tag blob.
    // Layout: version word, then five uint32 segment sizes (sensor, vfe, cpp,
    // cac, mod3), then the sensor/vfe/cpp/cac data segments back to back,
    // each clamped to its TUNING_*_MAX capacity. The blob element count is
    // the written byte span divided by sizeof(uint32_t).
    if (metadata->is_tuning_params_valid) {
        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;


        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
                sizeof(uint32_t));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
                sizeof(uint32_t));
        LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
                sizeof(uint32_t));
        LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
                sizeof(uint32_t));
        LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
                sizeof(uint32_t));
        LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        // mod3 segment is unused: force its size to zero in the header and
        // write no data for it below.
        metadata->tuning_params.tuning_mod3_data_size = 0;
        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
                sizeof(uint32_t));
        LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        // Each segment is copied from its fixed offset in the shared data[]
        // buffer, clamped so a corrupt size field cannot overrun it.
        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
                TUNING_SENSOR_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
                count);
        data += count;

        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
                TUNING_VFE_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
                count);
        data += count;

        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
                TUNING_CPP_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
                count);
        data += count;

        count = MIN(metadata->tuning_params.tuning_cac_data_size,
                TUNING_CAC_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
                count);
        data += count;

        // NOTE(review): the integer division truncates if the total byte span
        // is not a multiple of 4 — confirm segment sizes are word-aligned.
        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
                (int32_t *)(void *)tuning_meta_data_blob,
                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
    }
7856 
    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
                NEUTRAL_COL_POINTS);
    }

    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
    }

    // AE regions: translate the HAL ROI (sensor coordinates) into the active
    // array coordinate system before packing into the framework tuple.
    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
        int32_t aeRegions[REGIONS_TUPLE_COUNT];
        // Adjust crop region from sensor output coordinate system to active
        // array coordinate system.
        cam_rect_t hAeRect = hAeRegions->rect;
        mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
                hAeRect.width, hAeRect.height);

        convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
                REGIONS_TUPLE_COUNT);
        LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
                 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
                hAeRect.left, hAeRect.top, hAeRect.width,
                hAeRect.height);
    }

    // AF state: a cached state on the pending request (if valid) takes
    // precedence over the state reported in this frame's metadata; skipped
    // entirely if the state was already sent for this request.
    if (!pendingRequest.focusStateSent) {
        if (pendingRequest.focusStateValid) {
            camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
            LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
        } else {
            IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
                uint8_t fwk_afState = (uint8_t) *afState;
                camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
                LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
            }
        }
    }
7898 
    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
    }

    // Focus range is a [near, far] pair.
    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
    }

    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
        uint8_t fwk_lensState = *lensState;
        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
    }

    // Antibanding: the framework has no 50Hz/60Hz-specific AUTO values, so
    // both HAL auto variants collapse to plain AUTO before the enum lookup.
    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
        uint32_t ab_mode = *hal_ab_mode;
        if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
                ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
              ab_mode = CAM_ANTIBANDING_MODE_AUTO;
        }
        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
                ab_mode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_ab_mode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
        }
    }

    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
        int val = lookupFwkName(SCENE_MODES_MAP,
                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkBestshotMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
            LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
        } else {
            LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
        }
    }

    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
         uint8_t fwk_mode = (uint8_t) *mode;
         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
    }

    /* Constant metadata values to be update*/
    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);

    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);

    // Deliberately published with a zero count (empty entry); the array
    // contents are never read, so leaving it uninitialized is harmless here.
    int32_t hotPixelMap[2];
    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7952 
    // CDS
    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
    }

    // Video HDR: collapse the HAL sensor-HDR enum to an on/off vendor value
    // and track toggles in mCurrFeatureState for profiling.
    IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
        int32_t fwk_hdr;
        int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
        if(*vhdr == CAM_SENSOR_HDR_OFF) {
            fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
        } else {
            fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
        }

        if(fwk_hdr != curr_hdr_state) {
           LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
           if(fwk_hdr)
              mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
           else
              mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
    }

    //binning correction
    IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
            CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
        int32_t fwk_bin_mode = (int32_t) *bin_correction;
        camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
    }

    // IR mode: any positive value counts as "on" for feature-state tracking.
    IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
        int32_t fwk_ir = (int32_t) *ir;
        int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
        int8_t is_ir_on = 0;

        (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
        if(is_ir_on != curr_ir_state) {
           LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
           if(is_ir_on)
              mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
           else
              mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
        }
        camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
    }

    // AEC SPEED
    IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
        camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
    }

    // AWB SPEED
    IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
        camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
    }

    // TNR: publish temporal denoise enable + process-plate type, tracking
    // on/off transitions in mCurrFeatureState for profiling.
    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
        uint8_t tnr_enable       = tnr->denoise_enable;
        int32_t tnr_process_type = (int32_t)tnr->process_plates;
        int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
        int8_t is_tnr_on = 0;

        (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
        if(is_tnr_on != curr_tnr_state) {
           LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
           if(is_tnr_on)
              mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
           else
              mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
        }

        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
    }
8029 
8030     // Reprocess crop data
8031     IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
8032         uint8_t cnt = crop_data->num_of_streams;
8033         if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
8034             // mm-qcamera-daemon only posts crop_data for streams
8035             // not linked to pproc. So no valid crop metadata is not
8036             // necessarily an error case.
8037             LOGD("No valid crop metadata entries");
8038         } else {
8039             uint32_t reproc_stream_id;
8040             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8041                 LOGD("No reprocessible stream found, ignore crop data");
8042             } else {
8043                 int rc = NO_ERROR;
8044                 Vector<int32_t> roi_map;
8045                 int32_t *crop = new int32_t[cnt*4];
8046                 if (NULL == crop) {
8047                    rc = NO_MEMORY;
8048                 }
8049                 if (NO_ERROR == rc) {
8050                     int32_t streams_found = 0;
8051                     for (size_t i = 0; i < cnt; i++) {
8052                         if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
8053                             if (pprocDone) {
8054                                 // HAL already does internal reprocessing,
8055                                 // either via reprocessing before JPEG encoding,
8056                                 // or offline postprocessing for pproc bypass case.
8057                                 crop[0] = 0;
8058                                 crop[1] = 0;
8059                                 crop[2] = mInputStreamInfo.dim.width;
8060                                 crop[3] = mInputStreamInfo.dim.height;
8061                             } else {
8062                                 crop[0] = crop_data->crop_info[i].crop.left;
8063                                 crop[1] = crop_data->crop_info[i].crop.top;
8064                                 crop[2] = crop_data->crop_info[i].crop.width;
8065                                 crop[3] = crop_data->crop_info[i].crop.height;
8066                             }
8067                             roi_map.add(crop_data->crop_info[i].roi_map.left);
8068                             roi_map.add(crop_data->crop_info[i].roi_map.top);
8069                             roi_map.add(crop_data->crop_info[i].roi_map.width);
8070                             roi_map.add(crop_data->crop_info[i].roi_map.height);
8071                             streams_found++;
8072                             LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
8073                                     crop[0], crop[1], crop[2], crop[3]);
8074                             LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
8075                                     crop_data->crop_info[i].roi_map.left,
8076                                     crop_data->crop_info[i].roi_map.top,
8077                                     crop_data->crop_info[i].roi_map.width,
8078                                     crop_data->crop_info[i].roi_map.height);
8079                             break;
8080 
8081                        }
8082                     }
8083                     camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
8084                             &streams_found, 1);
8085                     camMetadata.update(QCAMERA3_CROP_REPROCESS,
8086                             crop, (size_t)(streams_found * 4));
8087                     if (roi_map.array()) {
8088                         camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
8089                                 roi_map.array(), roi_map.size());
8090                     }
8091                }
8092                if (crop) {
8093                    delete [] crop;
8094                }
8095             }
8096         }
8097     }
8098 
8099     if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8100         // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
8101         // so hardcoding the CAC result to OFF mode.
8102         uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8103         camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
8104     } else {
8105         IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
8106             int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8107                     *cacMode);
8108             if (NAME_NOT_FOUND != val) {
8109                 uint8_t resultCacMode = (uint8_t)val;
8110                 // check whether CAC result from CB is equal to Framework set CAC mode
8111                 // If not equal then set the CAC mode came in corresponding request
8112                 if (pendingRequest.fwkCacMode != resultCacMode) {
8113                     resultCacMode = pendingRequest.fwkCacMode;
8114                 }
8115                 //Check if CAC is disabled by property
8116                 if (m_cacModeDisabled) {
8117                     resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8118                 }
8119 
8120                 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
8121                 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
8122             } else {
8123                 LOGE("Invalid CAC camera parameter: %d", *cacMode);
8124             }
8125         }
8126     }
8127 
8128     // Post blob of cam_cds_data through vendor tag.
8129     IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8130         uint8_t cnt = cdsInfo->num_of_streams;
8131         cam_cds_data_t cdsDataOverride;
8132         memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8133         cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8134         cdsDataOverride.num_of_streams = 1;
8135         if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8136             uint32_t reproc_stream_id;
8137             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8138                 LOGD("No reprocessible stream found, ignore cds data");
8139             } else {
8140                 for (size_t i = 0; i < cnt; i++) {
8141                     if (cdsInfo->cds_info[i].stream_id ==
8142                             reproc_stream_id) {
8143                         cdsDataOverride.cds_info[0].cds_enable =
8144                                 cdsInfo->cds_info[i].cds_enable;
8145                         break;
8146                     }
8147                 }
8148             }
8149         } else {
8150             LOGD("Invalid stream count %d in CDS_DATA", cnt);
8151         }
8152         camMetadata.update(QCAMERA3_CDS_INFO,
8153                 (uint8_t *)&cdsDataOverride,
8154                 sizeof(cam_cds_data_t));
8155     }
8156 
8157     // Ldaf calibration data
8158     if (!mLdafCalibExist) {
8159         IF_META_AVAILABLE(uint32_t, ldafCalib,
8160                 CAM_INTF_META_LDAF_EXIF, metadata) {
8161             mLdafCalibExist = true;
8162             mLdafCalib[0] = ldafCalib[0];
8163             mLdafCalib[1] = ldafCalib[1];
8164             LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8165                     ldafCalib[0], ldafCalib[1]);
8166         }
8167     }
8168 
8169     // EXIF debug data through vendor tag
8170     /*
8171      * Mobicat Mask can assume 3 values:
8172      * 1 refers to Mobicat data,
8173      * 2 refers to Stats Debug and Exif Debug Data
8174      * 3 refers to Mobicat and Stats Debug Data
8175      * We want to make sure that we are sending Exif debug data
8176      * only when Mobicat Mask is 2.
8177      */
8178     if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8179         camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8180                 (uint8_t *)(void *)mExifParams.debug_params,
8181                 sizeof(mm_jpeg_debug_exif_params_t));
8182     }
8183 
8184     // Reprocess and DDM debug data through vendor tag
8185     cam_reprocess_info_t repro_info;
8186     memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
8187     IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8188             CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
8189         memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
8190     }
8191     IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8192             CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
8193         memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
8194     }
8195     IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8196             CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
8197         memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
8198     }
8199     IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8200             CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
8201         memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
8202     }
8203     IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8204             CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
8205         memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
8206     }
8207     IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
8208         memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
8209     }
8210     IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8211             CAM_INTF_PARM_ROTATION, metadata) {
8212         memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
8213     }
8214     IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8215         memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8216     }
8217     IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8218         memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8219     }
8220     camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8221         (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
8222 
8223     // INSTANT AEC MODE
8224     IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8225             CAM_INTF_PARM_INSTANT_AEC, metadata) {
8226         camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8227     }
8228 
8229     // AF scene change
8230     IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8231         camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8232     }
8233 
8234     // Enable ZSL
8235     if (enableZsl != nullptr) {
8236         uint8_t value = *enableZsl ?
8237                 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8238         camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8239     }
8240 
8241     // OIS Data
8242     IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8243         camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8244             &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8245         camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8246             frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8247         camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8248             frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8249         camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8250             frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
8251     }
8252 
8253     resultMetadata = camMetadata.release();
8254     return resultMetadata;
8255 }
8256 
8257 /*===========================================================================
8258  * FUNCTION   : saveExifParams
8259  *
 * DESCRIPTION: Save the 3A/stats EXIF debug parameters received in a HAL
 *              metadata callback into mExifParams for later JPEG encoding.
8261  *
8262  * PARAMETERS :
8263  *   @metadata : metadata information from callback
8264  *
8265  * RETURN     : none
8266  *
8267  *==========================================================================*/
saveExifParams(metadata_buffer_t * metadata)8268 void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8269 {
8270     IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8271             CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8272         if (mExifParams.debug_params) {
8273             mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8274             mExifParams.debug_params->ae_debug_params_valid = TRUE;
8275         }
8276     }
8277     IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8278             CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8279         if (mExifParams.debug_params) {
8280             mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8281             mExifParams.debug_params->awb_debug_params_valid = TRUE;
8282         }
8283     }
8284     IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8285             CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8286         if (mExifParams.debug_params) {
8287             mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8288             mExifParams.debug_params->af_debug_params_valid = TRUE;
8289         }
8290     }
8291     IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8292             CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8293         if (mExifParams.debug_params) {
8294             mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8295             mExifParams.debug_params->asd_debug_params_valid = TRUE;
8296         }
8297     }
8298     IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8299             CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8300         if (mExifParams.debug_params) {
8301             mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8302             mExifParams.debug_params->stats_debug_params_valid = TRUE;
8303         }
8304     }
8305     IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8306             CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8307         if (mExifParams.debug_params) {
8308             mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8309             mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8310         }
8311     }
8312     IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8313             CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8314         if (mExifParams.debug_params) {
8315             mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8316             mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8317         }
8318     }
8319     IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8320             CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8321         if (mExifParams.debug_params) {
8322             mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8323             mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8324         }
8325     }
8326 }
8327 
8328 /*===========================================================================
8329  * FUNCTION   : get3AExifParams
8330  *
 * DESCRIPTION: Return the cached 3A EXIF parameters gathered from HAL
 *              metadata callbacks (see saveExifParams).
8332  *
8333  * PARAMETERS : none
8334  *
8335  *
8336  * RETURN     : mm_jpeg_exif_params_t
8337  *
8338  *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Return (by value) the 3A EXIF parameters cached by saveExifParams().
    // NOTE(review): the copy shares the debug_params pointer with the
    // member; the pointee is not deep-copied — callers must not free it.
    return mExifParams;
}
8343 
8344 /*===========================================================================
8345  * FUNCTION   : translateCbUrgentMetadataToResultMetadata
8346  *
 * DESCRIPTION: Translate urgent (early partial) HAL metadata from a callback
 *              into framework result metadata: 3A states, triggers and modes.
8348  *
8349  * PARAMETERS :
8350  *   @metadata : metadata information from callback
8351  *   @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8352  *                               urgent metadata in a batch. Always true for
8353  *                               non-batch mode.
8354  *   @frame_number :             frame number for this urgent metadata
8355  *   @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8356  *                         i.e. even though it doesn't map to a valid partial
8357  *                         frame number, its metadata entries should be kept.
8358  * RETURN     : camera_metadata_t*
8359  *              metadata in a format specified by fwk
8360  *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
                                 uint32_t frame_number, bool isJumpstartMetadata)
{
    // Translate the "urgent" (early partial) HAL metadata of a frame into
    // framework result metadata: 3A states and modes, AF/AE triggers, AF
    // regions, instant-AEC bookkeeping and TOF scene distance.
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;

    if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
        /* In batch mode, use empty metadata if this is not the last in batch
         */
        resultMetadata = allocate_camera_metadata(0, 0);
        return resultMetadata;
    }

    // AWB state reported by the HAL (searching/converged/locked...).
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger echoed back with its id.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state (searching/converged/flash-required...).
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // Map the HAL focus mode to the framework AF mode enum.
    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
                    val);
        }
    }

    // AF trigger handling. mAfTrigger persists across frames so the
    // trigger/trigger-id are reported even on frames without new trigger
    // metadata (see the unconditional updates after this block).
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
            af_trigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
            af_trigger->trigger_id);

        IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
            mAfTrigger = *af_trigger;
            uint32_t fwk_AfState = (uint32_t) *afState;

            // If this is the result for a new trigger, check if there is new early
            // af state. If there is, use the last af state for all results
            // preceding current partial frame number.
            for (auto & pendingRequest : mPendingRequestsList) {
                if (pendingRequest.frame_number < frame_number) {
                    // Older pending frames inherit the current AF state.
                    pendingRequest.focusStateValid = true;
                    pendingRequest.focusState = fwk_AfState;
                } else if (pendingRequest.frame_number == frame_number) {
                    IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
                        // Check if early AF state for trigger exists. If yes, send AF state as
                        // partial result for better latency.
                        uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
                        pendingRequest.focusStateSent = true;
                        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
                        LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
                                 frame_number, fwkEarlyAfState);
                    }
                }
            }
        }
    }
    // Always report the last known AF trigger/id (possibly from a prior frame).
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
        &mAfTrigger.trigger, 1);
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);

    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
        /*af regions*/
        cam_rect_t hAfRect = hAfRegions->rect;
        int32_t afRegions[REGIONS_TUPLE_COUNT];
        // Adjust crop region from sensor output coordinate system to active
        // array coordinate system.
        mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
                hAfRect.width, hAfRect.height);

        convertToRegions(hAfRect, afRegions, hAfRegions->weight);
        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
                REGIONS_TUPLE_COUNT);
        LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
                 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
                hAfRect.left, hAfRect.top, hAfRect.width,
                hAfRect.height);
    }

    // AF region confidence
    IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
        camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
    }

    // Map the HAL white-balance mode to the framework AWB mode enum.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // Deduce ANDROID_CONTROL_AE_MODE from three independent HAL fields,
    // checked in priority order: redeye reduction, flash mode, AE mode.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
        fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }
    // Instant AEC: keep skipping display frames until AEC settles or the
    // configured frame bound is hit, then reset the instant-AEC state.
    if (mInstantAEC) {
        // Increment frame Idx count untill a bound reached for instant AEC.
        mInstantAecFrameIdxCount++;
        IF_META_AVAILABLE(cam_3a_params_t, ae_params,
                CAM_INTF_META_AEC_INFO, metadata) {
            LOGH("ae_params->settled = %d",ae_params->settled);
            // If AEC settled, or if number of frames reached bound value,
            // should reset instant AEC.
            if (ae_params->settled ||
                    (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
                LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
                mInstantAEC = false;
                mResetInstantAEC = true;
                mInstantAecFrameIdxCount = 0;
            }
        }
    }

    // Time-of-flight scene distance: only trust the distance when the
    // sensor reports confidence == 1, otherwise report -1 (unknown).
    IF_META_AVAILABLE(int32_t, af_tof_confidence,
            CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
        IF_META_AVAILABLE(int32_t, af_tof_distance,
                CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
            int32_t fwk_af_tof_confidence = *af_tof_confidence;
            int32_t fwk_af_tof_distance = *af_tof_distance;
            if (fwk_af_tof_confidence == 1) {
                mSceneDistance = fwk_af_tof_distance;
            } else {
                mSceneDistance = -1;
            }
            LOGD("tof_distance %d, tof_confidence %d, mSceneDistance %d",
                     fwk_af_tof_distance, fwk_af_tof_confidence, mSceneDistance);
        }
    }
    // Always published, even if not updated this frame (sticky member).
    camMetadata.update(NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE, &mSceneDistance, 1);

    resultMetadata = camMetadata.release();
    return resultMetadata;
}
8559 
8560 /*===========================================================================
8561  * FUNCTION   : dumpMetadataToFile
8562  *
8563  * DESCRIPTION: Dumps tuning metadata to file system
8564  *
8565  * PARAMETERS :
8566  *   @meta           : tuning metadata
8567  *   @dumpFrameCount : current dump frame count
8568  *   @enabled        : Enable mask
8569  *
8570  *==========================================================================*/
dumpMetadataToFile(tuning_params_t & meta,uint32_t & dumpFrameCount,bool enabled,const char * type,uint32_t frameNumber)8571 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8572                                                    uint32_t &dumpFrameCount,
8573                                                    bool enabled,
8574                                                    const char *type,
8575                                                    uint32_t frameNumber)
8576 {
8577     //Some sanity checks
8578     if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8579         LOGE("Tuning sensor data size bigger than expected %d: %d",
8580               meta.tuning_sensor_data_size,
8581               TUNING_SENSOR_DATA_MAX);
8582         return;
8583     }
8584 
8585     if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8586         LOGE("Tuning VFE data size bigger than expected %d: %d",
8587               meta.tuning_vfe_data_size,
8588               TUNING_VFE_DATA_MAX);
8589         return;
8590     }
8591 
8592     if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8593         LOGE("Tuning CPP data size bigger than expected %d: %d",
8594               meta.tuning_cpp_data_size,
8595               TUNING_CPP_DATA_MAX);
8596         return;
8597     }
8598 
8599     if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8600         LOGE("Tuning CAC data size bigger than expected %d: %d",
8601               meta.tuning_cac_data_size,
8602               TUNING_CAC_DATA_MAX);
8603         return;
8604     }
8605     //
8606 
8607     if(enabled){
8608         char timeBuf[FILENAME_MAX];
8609         char buf[FILENAME_MAX];
8610         memset(buf, 0, sizeof(buf));
8611         memset(timeBuf, 0, sizeof(timeBuf));
8612         time_t current_time;
8613         struct tm * timeinfo;
8614         time (&current_time);
8615         timeinfo = localtime (&current_time);
8616         if (timeinfo != NULL) {
8617             strftime (timeBuf, sizeof(timeBuf),
8618                     QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8619         }
8620         String8 filePath(timeBuf);
8621         snprintf(buf,
8622                 sizeof(buf),
8623                 "%dm_%s_%d.bin",
8624                 dumpFrameCount,
8625                 type,
8626                 frameNumber);
8627         filePath.append(buf);
8628         int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8629         if (file_fd >= 0) {
8630             ssize_t written_len = 0;
8631             meta.tuning_data_version = TUNING_DATA_VERSION;
8632             void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8633             written_len += write(file_fd, data, sizeof(uint32_t));
8634             data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8635             LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8636             written_len += write(file_fd, data, sizeof(uint32_t));
8637             data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8638             LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8639             written_len += write(file_fd, data, sizeof(uint32_t));
8640             data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8641             LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8642             written_len += write(file_fd, data, sizeof(uint32_t));
8643             data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8644             LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8645             written_len += write(file_fd, data, sizeof(uint32_t));
8646             meta.tuning_mod3_data_size = 0;
8647             data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8648             LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8649             written_len += write(file_fd, data, sizeof(uint32_t));
8650             size_t total_size = meta.tuning_sensor_data_size;
8651             data = (void *)((uint8_t *)&meta.data);
8652             written_len += write(file_fd, data, total_size);
8653             total_size = meta.tuning_vfe_data_size;
8654             data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8655             written_len += write(file_fd, data, total_size);
8656             total_size = meta.tuning_cpp_data_size;
8657             data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8658             written_len += write(file_fd, data, total_size);
8659             total_size = meta.tuning_cac_data_size;
8660             data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8661             written_len += write(file_fd, data, total_size);
8662             close(file_fd);
8663         }else {
8664             LOGE("fail to open file for metadata dumping");
8665         }
8666     }
8667 }
8668 
8669 /*===========================================================================
8670  * FUNCTION   : cleanAndSortStreamInfo
8671  *
8672  * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8673  *              and sort them such that raw stream is at the end of the list
8674  *              This is a workaround for camera daemon constraint.
8675  *
8676  * PARAMETERS : None
8677  *
8678  *==========================================================================*/
cleanAndSortStreamInfo()8679 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8680 {
8681     List<stream_info_t *> newStreamInfo;
8682 
8683     /*clean up invalid streams*/
8684     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8685             it != mStreamInfo.end();) {
8686         if(((*it)->status) == INVALID){
8687             QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8688             delete channel;
8689             free(*it);
8690             it = mStreamInfo.erase(it);
8691         } else {
8692             it++;
8693         }
8694     }
8695 
8696     // Move preview/video/callback/snapshot streams into newList
8697     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8698             it != mStreamInfo.end();) {
8699         if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8700                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8701                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8702             newStreamInfo.push_back(*it);
8703             it = mStreamInfo.erase(it);
8704         } else
8705             it++;
8706     }
8707     // Move raw streams into newList
8708     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8709             it != mStreamInfo.end();) {
8710         newStreamInfo.push_back(*it);
8711         it = mStreamInfo.erase(it);
8712     }
8713 
8714     mStreamInfo = newStreamInfo;
8715 
8716     // Make sure that stream IDs are unique.
8717     uint32_t id = 0;
8718     for (auto streamInfo : mStreamInfo) {
8719         streamInfo->id = id++;
8720     }
8721 
8722 }
8723 
8724 /*===========================================================================
8725  * FUNCTION   : extractJpegMetadata
8726  *
8727  * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8728  *              JPEG metadata is cached in HAL, and return as part of capture
8729  *              result when metadata is returned from camera daemon.
8730  *
8731  * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8732  *              @request:      capture request
8733  *
8734  *==========================================================================*/
extractJpegMetadata(CameraMetadata & jpegMetadata,const camera3_capture_request_t * request)8735 void QCamera3HardwareInterface::extractJpegMetadata(
8736         CameraMetadata& jpegMetadata,
8737         const camera3_capture_request_t *request)
8738 {
8739     CameraMetadata frame_settings;
8740     frame_settings = request->settings;
8741 
8742     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8743         jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8744                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8745                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8746 
8747     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8748         jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8749                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8750                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8751 
8752     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8753         jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8754                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8755                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8756 
8757     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8758         jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8759                 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8760                 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8761 
8762     if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8763         jpegMetadata.update(ANDROID_JPEG_QUALITY,
8764                 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8765                 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8766 
8767     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8768         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8769                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8770                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8771 
8772     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8773         int32_t thumbnail_size[2];
8774         thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8775         thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8776         if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8777             int32_t orientation =
8778                   frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
8779             if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
8780                //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8781                int32_t temp;
8782                temp = thumbnail_size[0];
8783                thumbnail_size[0] = thumbnail_size[1];
8784                thumbnail_size[1] = temp;
8785             }
8786          }
8787          jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8788                 thumbnail_size,
8789                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8790     }
8791 
8792 }
8793 
8794 /*===========================================================================
8795  * FUNCTION   : convertToRegions
8796  *
8797  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8798  *
8799  * PARAMETERS :
8800  *   @rect   : cam_rect_t struct to convert
8801  *   @region : int32_t destination array
8802  *   @weight : if we are converting from cam_area_t, weight is valid
8803  *             else weight = -1
8804  *
8805  *==========================================================================*/
convertToRegions(cam_rect_t rect,int32_t * region,int weight)8806 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8807         int32_t *region, int weight)
8808 {
8809     region[FACE_LEFT] = rect.left;
8810     region[FACE_TOP] = rect.top;
8811     region[FACE_RIGHT] = rect.left + rect.width;
8812     region[FACE_BOTTOM] = rect.top + rect.height;
8813     if (weight > -1) {
8814         region[FACE_WEIGHT] = weight;
8815     }
8816 }
8817 
8818 /*===========================================================================
8819  * FUNCTION   : convertFromRegions
8820  *
8821  * DESCRIPTION: helper method to convert from array to cam_rect_t
8822  *
8823  * PARAMETERS :
8824  *   @rect   : cam_rect_t struct to convert
8825  *   @region : int32_t destination array
8826  *   @weight : if we are converting from cam_area_t, weight is valid
8827  *             else weight = -1
8828  *
8829  *==========================================================================*/
convertFromRegions(cam_area_t & roi,const CameraMetadata & frame_settings,uint32_t tag)8830 void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
8831         const CameraMetadata &frame_settings, uint32_t tag)
8832 {
8833     int32_t x_min = frame_settings.find(tag).data.i32[0];
8834     int32_t y_min = frame_settings.find(tag).data.i32[1];
8835     int32_t x_max = frame_settings.find(tag).data.i32[2];
8836     int32_t y_max = frame_settings.find(tag).data.i32[3];
8837     roi.weight = frame_settings.find(tag).data.i32[4];
8838     roi.rect.left = x_min;
8839     roi.rect.top = y_min;
8840     roi.rect.width = x_max - x_min;
8841     roi.rect.height = y_max - y_min;
8842 }
8843 
8844 /*===========================================================================
8845  * FUNCTION   : resetIfNeededROI
8846  *
8847  * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8848  *              crop region
8849  *
8850  * PARAMETERS :
8851  *   @roi       : cam_area_t struct to resize
8852  *   @scalerCropRegion : cam_crop_region_t region to compare against
8853  *
8854  *
8855  *==========================================================================*/
resetIfNeededROI(cam_area_t * roi,const cam_crop_region_t * scalerCropRegion)8856 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8857                                                  const cam_crop_region_t* scalerCropRegion)
8858 {
8859     int32_t roi_x_max = roi->rect.width + roi->rect.left;
8860     int32_t roi_y_max = roi->rect.height + roi->rect.top;
8861     int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8862     int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8863 
8864     /* According to spec weight = 0 is used to indicate roi needs to be disabled
8865      * without having this check the calculations below to validate if the roi
8866      * is inside scalar crop region will fail resulting in the roi not being
8867      * reset causing algorithm to continue to use stale roi window
8868      */
8869     if (roi->weight == 0) {
8870         return true;
8871     }
8872 
8873     if ((roi_x_max < scalerCropRegion->left) ||
8874         // right edge of roi window is left of scalar crop's left edge
8875         (roi_y_max < scalerCropRegion->top)  ||
8876         // bottom edge of roi window is above scalar crop's top edge
8877         (roi->rect.left > crop_x_max) ||
8878         // left edge of roi window is beyond(right) of scalar crop's right edge
8879         (roi->rect.top > crop_y_max)){
8880         // top edge of roi windo is above scalar crop's top edge
8881         return false;
8882     }
8883     if (roi->rect.left < scalerCropRegion->left) {
8884         roi->rect.left = scalerCropRegion->left;
8885     }
8886     if (roi->rect.top < scalerCropRegion->top) {
8887         roi->rect.top = scalerCropRegion->top;
8888     }
8889     if (roi_x_max > crop_x_max) {
8890         roi_x_max = crop_x_max;
8891     }
8892     if (roi_y_max > crop_y_max) {
8893         roi_y_max = crop_y_max;
8894     }
8895     roi->rect.width = roi_x_max - roi->rect.left;
8896     roi->rect.height = roi_y_max - roi->rect.top;
8897     return true;
8898 }
8899 
8900 /*===========================================================================
8901  * FUNCTION   : convertLandmarks
8902  *
8903  * DESCRIPTION: helper method to extract the landmarks from face detection info
8904  *
8905  * PARAMETERS :
8906  *   @landmark_data : input landmark data to be converted
8907  *   @landmarks : int32_t destination array
8908  *
8909  *
8910  *==========================================================================*/
convertLandmarks(cam_face_landmarks_info_t landmark_data,int32_t * landmarks)8911 void QCamera3HardwareInterface::convertLandmarks(
8912         cam_face_landmarks_info_t landmark_data,
8913         int32_t *landmarks)
8914 {
8915     if (landmark_data.is_left_eye_valid) {
8916         landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8917         landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8918     } else {
8919         landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8920         landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8921     }
8922 
8923     if (landmark_data.is_right_eye_valid) {
8924         landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8925         landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8926     } else {
8927         landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8928         landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8929     }
8930 
8931     if (landmark_data.is_mouth_valid) {
8932         landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8933         landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8934     } else {
8935         landmarks[MOUTH_X] = FACE_INVALID_POINT;
8936         landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8937     }
8938 }
8939 
8940 /*===========================================================================
8941  * FUNCTION   : setInvalidLandmarks
8942  *
8943  * DESCRIPTION: helper method to set invalid landmarks
8944  *
8945  * PARAMETERS :
8946  *   @landmarks : int32_t destination array
8947  *
8948  *
8949  *==========================================================================*/
setInvalidLandmarks(int32_t * landmarks)8950 void QCamera3HardwareInterface::setInvalidLandmarks(
8951         int32_t *landmarks)
8952 {
8953     landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8954     landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8955     landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8956     landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8957     landmarks[MOUTH_X] = FACE_INVALID_POINT;
8958     landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8959 }
8960 
8961 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
8962 
8963 /*===========================================================================
8964  * FUNCTION   : getCapabilities
8965  *
8966  * DESCRIPTION: query camera capability from back-end
8967  *
8968  * PARAMETERS :
8969  *   @ops  : mm-interface ops structure
8970  *   @cam_handle  : camera handle for which we need capability
8971  *
8972  * RETURN     : ptr type of capability structure
8973  *              capability for success
8974  *              NULL for failure
8975  *==========================================================================*/
getCapabilities(mm_camera_ops_t * ops,uint32_t cam_handle)8976 cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8977         uint32_t cam_handle)
8978 {
8979     int rc = NO_ERROR;
8980     QCamera3HeapMemory *capabilityHeap = NULL;
8981     cam_capability_t *cap_ptr = NULL;
8982 
8983     if (ops == NULL) {
8984         LOGE("Invalid arguments");
8985         return NULL;
8986     }
8987 
8988     capabilityHeap = new QCamera3HeapMemory(1);
8989     if (capabilityHeap == NULL) {
8990         LOGE("creation of capabilityHeap failed");
8991         return NULL;
8992     }
8993 
8994     /* Allocate memory for capability buffer */
8995     rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8996     if(rc != OK) {
8997         LOGE("No memory for cappability");
8998         goto allocate_failed;
8999     }
9000 
9001     /* Map memory for capability buffer */
9002     memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
9003 
9004     rc = ops->map_buf(cam_handle,
9005             CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
9006             sizeof(cam_capability_t), capabilityHeap->getPtr(0));
9007     if(rc < 0) {
9008         LOGE("failed to map capability buffer");
9009         rc = FAILED_TRANSACTION;
9010         goto map_failed;
9011     }
9012 
9013     /* Query Capability */
9014     rc = ops->query_capability(cam_handle);
9015     if(rc < 0) {
9016         LOGE("failed to query capability");
9017         rc = FAILED_TRANSACTION;
9018         goto query_failed;
9019     }
9020 
9021     cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
9022     if (cap_ptr == NULL) {
9023         LOGE("out of memory");
9024         rc = NO_MEMORY;
9025         goto query_failed;
9026     }
9027 
9028     memset(cap_ptr, 0, sizeof(cam_capability_t));
9029     memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
9030 
9031     int index;
9032     for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
9033         cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
9034         p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
9035         p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
9036     }
9037 
9038 query_failed:
9039     ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
9040 map_failed:
9041     capabilityHeap->deallocate();
9042 allocate_failed:
9043     delete capabilityHeap;
9044 
9045     if (rc != NO_ERROR) {
9046         return NULL;
9047     } else {
9048         return cap_ptr;
9049     }
9050 }
9051 
9052 /*===========================================================================
9053  * FUNCTION   : initCapabilities
9054  *
9055  * DESCRIPTION: initialize camera capabilities in static data struct
9056  *
9057  * PARAMETERS :
9058  *   @cameraId  : camera Id
9059  *
9060  * RETURN     : int32_t type of status
9061  *              NO_ERROR  -- success
9062  *              none-zero failure code
9063  *==========================================================================*/
initCapabilities(uint32_t cameraId)9064 int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
9065 {
9066     int rc = 0;
9067     mm_camera_vtbl_t *cameraHandle = NULL;
9068     uint32_t handle = 0;
9069 
9070     rc = camera_open((uint8_t)cameraId, &cameraHandle);
9071     if (rc) {
9072         LOGE("camera_open failed. rc = %d", rc);
9073         goto open_failed;
9074     }
9075     if (!cameraHandle) {
9076         LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
9077         goto open_failed;
9078     }
9079 
9080     handle = get_main_camera_handle(cameraHandle->camera_handle);
9081     gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
9082     if (gCamCapability[cameraId] == NULL) {
9083         rc = FAILED_TRANSACTION;
9084         goto failed_op;
9085     }
9086 
9087     gCamCapability[cameraId]->camera_index = cameraId;
9088     if (is_dual_camera_by_idx(cameraId)) {
9089         handle = get_aux_camera_handle(cameraHandle->camera_handle);
9090         gCamCapability[cameraId]->aux_cam_cap =
9091                 getCapabilities(cameraHandle->ops, handle);
9092         if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
9093             rc = FAILED_TRANSACTION;
9094             free(gCamCapability[cameraId]);
9095             goto failed_op;
9096         }
9097 
9098         // Copy the main camera capability to main_cam_cap struct
9099         gCamCapability[cameraId]->main_cam_cap =
9100                         (cam_capability_t *)malloc(sizeof(cam_capability_t));
9101         if (gCamCapability[cameraId]->main_cam_cap == NULL) {
9102             LOGE("out of memory");
9103             rc = NO_MEMORY;
9104             goto failed_op;
9105         }
9106         memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
9107                 sizeof(cam_capability_t));
9108     }
9109 failed_op:
9110     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
9111     cameraHandle = NULL;
9112 open_failed:
9113     return rc;
9114 }
9115 
9116 /*==========================================================================
9117  * FUNCTION   : get3Aversion
9118  *
9119  * DESCRIPTION: get the Q3A S/W version
9120  *
9121  * PARAMETERS :
9122  *  @sw_version: Reference of Q3A structure which will hold version info upon
9123  *               return
9124  *
9125  * RETURN     : None
9126  *
9127  *==========================================================================*/
get3AVersion(cam_q3a_version_t & sw_version)9128 void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
9129 {
9130     if(gCamCapability[mCameraId])
9131         sw_version = gCamCapability[mCameraId]->q3a_version;
9132     else
9133         LOGE("Capability structure NULL!");
9134 }
9135 
9136 
9137 /*===========================================================================
9138  * FUNCTION   : initParameters
9139  *
9140  * DESCRIPTION: initialize camera parameters
9141  *
9142  * PARAMETERS :
9143  *
9144  * RETURN     : int32_t type of status
9145  *              NO_ERROR  -- success
9146  *              none-zero failure code
9147  *==========================================================================*/
initParameters()9148 int QCamera3HardwareInterface::initParameters()
9149 {
9150     int rc = 0;
9151 
9152     //Allocate Set Param Buffer
9153     mParamHeap = new QCamera3HeapMemory(1);
9154     rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9155     if(rc != OK) {
9156         rc = NO_MEMORY;
9157         LOGE("Failed to allocate SETPARM Heap memory");
9158         delete mParamHeap;
9159         mParamHeap = NULL;
9160         return rc;
9161     }
9162 
9163     //Map memory for parameters buffer
9164     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9165             CAM_MAPPING_BUF_TYPE_PARM_BUF,
9166             mParamHeap->getFd(0),
9167             sizeof(metadata_buffer_t),
9168             (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9169     if(rc < 0) {
9170         LOGE("failed to map SETPARM buffer");
9171         rc = FAILED_TRANSACTION;
9172         mParamHeap->deallocate();
9173         delete mParamHeap;
9174         mParamHeap = NULL;
9175         return rc;
9176     }
9177 
9178     mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9179 
9180     mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9181     return rc;
9182 }
9183 
9184 /*===========================================================================
9185  * FUNCTION   : deinitParameters
9186  *
9187  * DESCRIPTION: de-initialize camera parameters
9188  *
9189  * PARAMETERS :
9190  *
9191  * RETURN     : NONE
9192  *==========================================================================*/
deinitParameters()9193 void QCamera3HardwareInterface::deinitParameters()
9194 {
9195     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9196             CAM_MAPPING_BUF_TYPE_PARM_BUF);
9197 
9198     mParamHeap->deallocate();
9199     delete mParamHeap;
9200     mParamHeap = NULL;
9201 
9202     mParameters = NULL;
9203 
9204     free(mPrevParameters);
9205     mPrevParameters = NULL;
9206 }
9207 
9208 /*===========================================================================
9209  * FUNCTION   : calcMaxJpegSize
9210  *
9211  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9212  *
9213  * PARAMETERS :
9214  *
9215  * RETURN     : max_jpeg_size
9216  *==========================================================================*/
calcMaxJpegSize(uint32_t camera_id)9217 size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9218 {
9219     size_t max_jpeg_size = 0;
9220     size_t temp_width, temp_height;
9221     size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9222             MAX_SIZES_CNT);
9223     for (size_t i = 0; i < count; i++) {
9224         temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9225         temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9226         if (temp_width * temp_height > max_jpeg_size ) {
9227             max_jpeg_size = temp_width * temp_height;
9228         }
9229     }
9230     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9231     return max_jpeg_size;
9232 }
9233 
9234 /*===========================================================================
9235  * FUNCTION   : getMaxRawSize
9236  *
9237  * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9238  *
9239  * PARAMETERS :
9240  *
9241  * RETURN     : Largest supported Raw Dimension
9242  *==========================================================================*/
getMaxRawSize(uint32_t camera_id)9243 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9244 {
9245     int max_width = 0;
9246     cam_dimension_t maxRawSize;
9247 
9248     memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9249     for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9250         if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9251             max_width = gCamCapability[camera_id]->raw_dim[i].width;
9252             maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9253         }
9254     }
9255     return maxRawSize;
9256 }
9257 
9258 
9259 /*===========================================================================
9260  * FUNCTION   : calcMaxJpegDim
9261  *
9262  * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9263  *
9264  * PARAMETERS :
9265  *
9266  * RETURN     : max_jpeg_dim
9267  *==========================================================================*/
calcMaxJpegDim()9268 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9269 {
9270     cam_dimension_t max_jpeg_dim;
9271     cam_dimension_t curr_jpeg_dim;
9272     max_jpeg_dim.width = 0;
9273     max_jpeg_dim.height = 0;
9274     curr_jpeg_dim.width = 0;
9275     curr_jpeg_dim.height = 0;
9276     for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9277         curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9278         curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9279         if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9280             max_jpeg_dim.width * max_jpeg_dim.height ) {
9281             max_jpeg_dim.width = curr_jpeg_dim.width;
9282             max_jpeg_dim.height = curr_jpeg_dim.height;
9283         }
9284     }
9285     return max_jpeg_dim;
9286 }
9287 
9288 /*===========================================================================
9289  * FUNCTION   : addStreamConfig
9290  *
9291  * DESCRIPTION: adds the stream configuration to the array
9292  *
9293  * PARAMETERS :
9294  * @available_stream_configs : pointer to stream configuration array
9295  * @scalar_format            : scalar format
9296  * @dim                      : configuration dimension
9297  * @config_type              : input or output configuration type
9298  *
9299  * RETURN     : NONE
9300  *==========================================================================*/
addStreamConfig(Vector<int32_t> & available_stream_configs,int32_t scalar_format,const cam_dimension_t & dim,int32_t config_type)9301 void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9302         int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9303 {
9304     available_stream_configs.add(scalar_format);
9305     available_stream_configs.add(dim.width);
9306     available_stream_configs.add(dim.height);
9307     available_stream_configs.add(config_type);
9308 }
9309 
9310 /*===========================================================================
9311  * FUNCTION   : suppportBurstCapture
9312  *
9313  * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9314  *
9315  * PARAMETERS :
9316  *   @cameraId  : camera Id
9317  *
9318  * RETURN     : true if camera supports BURST_CAPTURE
9319  *              false otherwise
9320  *==========================================================================*/
supportBurstCapture(uint32_t cameraId)9321 bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9322 {
9323     const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9324     const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9325     const int32_t highResWidth = 3264;
9326     const int32_t highResHeight = 2448;
9327 
9328     if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9329         // Maximum resolution images cannot be captured at >= 10fps
9330         // -> not supporting BURST_CAPTURE
9331         return false;
9332     }
9333 
9334     if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9335         // Maximum resolution images can be captured at >= 20fps
9336         // --> supporting BURST_CAPTURE
9337         return true;
9338     }
9339 
9340     // Find the smallest highRes resolution, or largest resolution if there is none
9341     size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9342             MAX_SIZES_CNT);
9343     size_t highRes = 0;
9344     while ((highRes + 1 < totalCnt) &&
9345             (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9346             gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9347             highResWidth * highResHeight)) {
9348         highRes++;
9349     }
9350     if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9351         return true;
9352     } else {
9353         return false;
9354     }
9355 }
9356 
9357 /*===========================================================================
9358  * FUNCTION   : getPDStatIndex
9359  *
9360  * DESCRIPTION: Return the meta raw phase detection statistics index if present
9361  *
9362  * PARAMETERS :
9363  *   @caps    : camera capabilities
9364  *
9365  * RETURN     : int32_t type
9366  *              non-negative - on success
9367  *              -1 - on failure
9368  *==========================================================================*/
getPDStatIndex(cam_capability_t * caps)9369 int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9370     if (nullptr == caps) {
9371         return -1;
9372     }
9373 
9374     uint32_t metaRawCount = caps->meta_raw_channel_count;
9375     int32_t ret = -1;
9376     for (size_t i = 0; i < metaRawCount; i++) {
9377         if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9378             ret = i;
9379             break;
9380         }
9381     }
9382 
9383     return ret;
9384 }
9385 
9386 /*===========================================================================
9387  * FUNCTION   : initStaticMetadata
9388  *
9389  * DESCRIPTION: initialize the static metadata
9390  *
9391  * PARAMETERS :
9392  *   @cameraId  : camera Id
9393  *
9394  * RETURN     : int32_t type of status
9395  *              0  -- success
9396  *              non-zero failure code
9397  *==========================================================================*/
initStaticMetadata(uint32_t cameraId)9398 int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9399 {
9400     int rc = 0;
9401     CameraMetadata staticInfo;
9402     size_t count = 0;
9403     bool limitedDevice = false;
9404     char prop[PROPERTY_VALUE_MAX];
9405     bool supportBurst = false;
9406     Vector<int32_t> available_characteristics_keys;
9407 
9408     supportBurst = supportBurstCapture(cameraId);
9409 
9410     /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9411      * guaranteed or if min fps of max resolution is less than 20 fps, its
9412      * advertised as limited device*/
9413     limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9414             (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9415             (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9416             !supportBurst;
9417 
9418     uint8_t supportedHwLvl = limitedDevice ?
9419             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
9420 #ifndef USE_HAL_3_3
9421             // LEVEL_3 - This device will support level 3.
9422             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9423 #else
9424             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
9425 #endif
9426 
9427     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9428             &supportedHwLvl, 1);
9429 
9430     bool facingBack = false;
9431     if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9432             (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9433         facingBack = true;
9434     }
9435     /*HAL 3 only*/
9436     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9437                     &gCamCapability[cameraId]->min_focus_distance, 1);
9438 
9439     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9440                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
9441 
9442     /*should be using focal lengths but sensor doesn't provide that info now*/
9443     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9444                       &gCamCapability[cameraId]->focal_length,
9445                       1);
9446 
9447     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9448             gCamCapability[cameraId]->apertures,
9449             MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9450 
9451     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9452             gCamCapability[cameraId]->filter_densities,
9453             MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9454 
9455 
9456     uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9457     size_t mode_count =
9458         MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9459     for (size_t i = 0; i < mode_count; i++) {
9460       available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9461     }
9462     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9463             available_opt_stab_modes, mode_count);
9464 
9465     int32_t lens_shading_map_size[] = {
9466             MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9467             MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9468     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9469                       lens_shading_map_size,
9470                       sizeof(lens_shading_map_size)/sizeof(int32_t));
9471 
9472     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9473             gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9474 
9475     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9476             gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9477 
9478     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9479             &gCamCapability[cameraId]->max_frame_duration, 1);
9480 
9481     camera_metadata_rational baseGainFactor = {
9482             gCamCapability[cameraId]->base_gain_factor.numerator,
9483             gCamCapability[cameraId]->base_gain_factor.denominator};
9484     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9485                       &baseGainFactor, 1);
9486 
9487     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9488                      (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9489 
9490     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9491             gCamCapability[cameraId]->pixel_array_size.height};
9492     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9493                       pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9494 
9495     int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9496             gCamCapability[cameraId]->active_array_size.top,
9497             gCamCapability[cameraId]->active_array_size.width,
9498             gCamCapability[cameraId]->active_array_size.height};
9499     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9500             active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9501 
9502     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9503             &gCamCapability[cameraId]->white_level, 1);
9504 
9505     int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9506     adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9507             gCamCapability[cameraId]->color_arrangement);
9508     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
9509             adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
9510 
9511 #ifndef USE_HAL_3_3
9512     bool hasBlackRegions = false;
9513     if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9514         LOGW("black_region_count: %d is bounded to %d",
9515             gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9516         gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9517     }
9518     if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9519         int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9520         for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9521             opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9522         }
9523         staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9524                 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9525         hasBlackRegions = true;
9526     }
9527 #endif
9528     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9529             &gCamCapability[cameraId]->flash_charge_duration, 1);
9530 
9531     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9532             &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9533 
9534     uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9535             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9536             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
9537     staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9538             &timestampSource, 1);
9539 
9540     //update histogram vendor data
9541     staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
9542             &gCamCapability[cameraId]->histogram_size, 1);
9543 
9544     staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
9545             &gCamCapability[cameraId]->max_histogram_count, 1);
9546 
9547     //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9548     //so that app can request fewer number of bins than the maximum supported.
9549     std::vector<int32_t> histBins;
9550     int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9551     histBins.push_back(maxHistBins);
9552     while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9553            (maxHistBins & 0x1) == 0) {
9554         histBins.push_back(maxHistBins >> 1);
9555         maxHistBins >>= 1;
9556     }
9557     staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9558             histBins.data(), histBins.size());
9559     if (!histBins.empty()) {
9560         available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS);
9561     }
9562 
9563     int32_t sharpness_map_size[] = {
9564             gCamCapability[cameraId]->sharpness_map_size.width,
9565             gCamCapability[cameraId]->sharpness_map_size.height};
9566 
9567     staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9568             sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9569 
9570     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9571             &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9572 
9573     int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9574     if (0 <= indexPD) {
9575         // Advertise PD stats data as part of the Depth capabilities
9576         int32_t depthWidth =
9577                 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9578         int32_t depthHeight =
9579                 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9580         int32_t depthStride =
9581                 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
9582         int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9583         assert(0 < depthSamplesCount);
9584         staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9585                 &depthSamplesCount, 1);
9586 
9587         int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9588                 depthHeight,
9589                 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9590                 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9591                 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9592         staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9593                 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9594 
9595         int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9596                 depthHeight, 33333333,
9597                 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9598         staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9599                 depthMinDuration,
9600                 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9601 
9602         int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9603                 depthHeight, 0,
9604                 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9605         staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9606                 depthStallDuration,
9607                 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9608 
9609         uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9610         staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9611 
9612         int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9613         staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9614                 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
9615         available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS);
9616 
9617         staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS,
9618                 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.right_gain_map),
9619                 sizeof(gCamCapability[cameraId]->pdaf_cal.right_gain_map));
9620         available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS);
9621 
9622         staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS,
9623                 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.left_gain_map),
9624                 sizeof(gCamCapability[cameraId]->pdaf_cal.left_gain_map));
9625         available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS);
9626 
9627         staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF,
9628                 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.conversion_coeff),
9629                 sizeof(gCamCapability[cameraId]->pdaf_cal.conversion_coeff));
9630         available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF);
9631 
9632     }
9633 
9634     int32_t scalar_formats[] = {
9635             ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9636             ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9637             ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9638             ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9639             HAL_PIXEL_FORMAT_RAW10,
9640             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
9641     size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9642     staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9643             scalar_formats_count);
9644 
9645     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9646     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9647     makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9648             count, MAX_SIZES_CNT, available_processed_sizes);
9649     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9650             available_processed_sizes, count * 2);
9651 
9652     int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9653     count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9654     makeTable(gCamCapability[cameraId]->raw_dim,
9655             count, MAX_SIZES_CNT, available_raw_sizes);
9656     staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9657             available_raw_sizes, count * 2);
9658 
9659     int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9660     count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9661     makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9662             count, MAX_SIZES_CNT, available_fps_ranges);
9663     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9664             available_fps_ranges, count * 2);
9665 
9666     camera_metadata_rational exposureCompensationStep = {
9667             gCamCapability[cameraId]->exp_compensation_step.numerator,
9668             gCamCapability[cameraId]->exp_compensation_step.denominator};
9669     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9670                       &exposureCompensationStep, 1);
9671 
9672     Vector<uint8_t> availableVstabModes;
9673     availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9674     char eis_prop[PROPERTY_VALUE_MAX];
9675     bool eisSupported = false;
9676     memset(eis_prop, 0, sizeof(eis_prop));
9677     property_get("persist.camera.eis.enable", eis_prop, "1");
9678     uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
9679     count = IS_TYPE_MAX;
9680     count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9681     for (size_t i = 0; i < count; i++) {
9682         if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9683             (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9684             eisSupported = true;
9685             break;
9686         }
9687     }
9688     if (facingBack && eis_prop_set && eisSupported) {
9689         availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9690     }
9691     staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9692                       availableVstabModes.array(), availableVstabModes.size());
9693 
9694     /*HAL 1 and HAL 3 common*/
9695     uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9696     uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9697     uint32_t minZoomStep = 100; //as per HAL1/API1 spec
9698     // Cap the max zoom to the max preferred value
9699     float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
9700     staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9701             &maxZoom, 1);
9702 
9703     uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9704     staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9705 
9706     int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9707     if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9708         max3aRegions[2] = 0; /* AF not supported */
9709     staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9710             max3aRegions, 3);
9711 
9712     /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9713     memset(prop, 0, sizeof(prop));
9714     property_get("persist.camera.facedetect", prop, "1");
9715     uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9716     LOGD("Support face detection mode: %d",
9717              supportedFaceDetectMode);
9718 
9719     int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
9720     /* support mode should be OFF if max number of face is 0 */
9721     if (maxFaces <= 0) {
9722         supportedFaceDetectMode = 0;
9723     }
9724     Vector<uint8_t> availableFaceDetectModes;
9725     availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9726     if (supportedFaceDetectMode == 1) {
9727         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9728     } else if (supportedFaceDetectMode == 2) {
9729         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9730     } else if (supportedFaceDetectMode == 3) {
9731         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9732         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9733     } else {
9734         maxFaces = 0;
9735     }
9736     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9737             availableFaceDetectModes.array(),
9738             availableFaceDetectModes.size());
9739     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9740             (int32_t *)&maxFaces, 1);
9741     uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9742     staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9743             &face_bsgc, 1);
9744 
9745     int32_t exposureCompensationRange[] = {
9746             gCamCapability[cameraId]->exposure_compensation_min,
9747             gCamCapability[cameraId]->exposure_compensation_max};
9748     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9749             exposureCompensationRange,
9750             sizeof(exposureCompensationRange)/sizeof(int32_t));
9751 
9752     uint8_t lensFacing = (facingBack) ?
9753             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9754     staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9755 
9756     staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9757                       available_thumbnail_sizes,
9758                       sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9759 
9760     /*all sizes will be clubbed into this tag*/
9761     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9762     /*android.scaler.availableStreamConfigurations*/
9763     Vector<int32_t> available_stream_configs;
9764     cam_dimension_t active_array_dim;
9765     active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9766     active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
9767 
9768     /*advertise list of input dimensions supported based on below property.
9769     By default all sizes upto 5MP will be advertised.
9770     Note that the setprop resolution format should be WxH.
9771     e.g: adb shell setprop persist.camera.input.minsize 1280x720
9772     To list all supported sizes, setprop needs to be set with "0x0" */
9773     cam_dimension_t minInputSize = {2592,1944}; //5MP
9774     memset(prop, 0, sizeof(prop));
9775     property_get("persist.camera.input.minsize", prop, "2592x1944");
9776     if (strlen(prop) > 0) {
9777         char *saveptr = NULL;
9778         char *token = strtok_r(prop, "x", &saveptr);
9779         if (token != NULL) {
9780             minInputSize.width = atoi(token);
9781         }
9782         token = strtok_r(NULL, "x", &saveptr);
9783         if (token != NULL) {
9784             minInputSize.height = atoi(token);
9785         }
9786     }
9787 
9788     /* Add input/output stream configurations for each scalar formats*/
9789     for (size_t j = 0; j < scalar_formats_count; j++) {
9790         switch (scalar_formats[j]) {
9791         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9792         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9793         case HAL_PIXEL_FORMAT_RAW10:
9794             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9795                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9796                 addStreamConfig(available_stream_configs, scalar_formats[j],
9797                         gCamCapability[cameraId]->raw_dim[i],
9798                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9799             }
9800             break;
9801         case HAL_PIXEL_FORMAT_BLOB:
9802             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9803                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9804                 addStreamConfig(available_stream_configs, scalar_formats[j],
9805                         gCamCapability[cameraId]->picture_sizes_tbl[i],
9806                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9807             }
9808             break;
9809         case HAL_PIXEL_FORMAT_YCbCr_420_888:
9810         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9811         default:
9812             cam_dimension_t largest_picture_size;
9813             memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9814             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9815                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9816                 addStreamConfig(available_stream_configs, scalar_formats[j],
9817                         gCamCapability[cameraId]->picture_sizes_tbl[i],
9818                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9819                 /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
9820                 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9821                         scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
9822                      if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9823                             >= minInputSize.width) || (gCamCapability[cameraId]->
9824                             picture_sizes_tbl[i].height >= minInputSize.height)) {
9825                          addStreamConfig(available_stream_configs, scalar_formats[j],
9826                                  gCamCapability[cameraId]->picture_sizes_tbl[i],
9827                                  ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9828                      }
9829                 }
9830             }
9831 
9832             break;
9833         }
9834     }
9835 
9836     staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9837                       available_stream_configs.array(), available_stream_configs.size());
9838     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9839     staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9840 
9841     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9842     staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9843 
9844     /* android.scaler.availableMinFrameDurations */
9845     Vector<int64_t> available_min_durations;
9846     for (size_t j = 0; j < scalar_formats_count; j++) {
9847         switch (scalar_formats[j]) {
9848         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9849         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9850         case HAL_PIXEL_FORMAT_RAW10:
9851             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9852                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9853                 available_min_durations.add(scalar_formats[j]);
9854                 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9855                 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9856                 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9857             }
9858             break;
9859         default:
9860             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9861                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9862                 available_min_durations.add(scalar_formats[j]);
9863                 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9864                 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9865                 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9866             }
9867             break;
9868         }
9869     }
9870     staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9871                       available_min_durations.array(), available_min_durations.size());
9872 
9873     Vector<int32_t> available_hfr_configs;
9874     for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9875         int32_t fps = 0;
9876         switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9877         case CAM_HFR_MODE_60FPS:
9878             fps = 60;
9879             break;
9880         case CAM_HFR_MODE_90FPS:
9881             fps = 90;
9882             break;
9883         case CAM_HFR_MODE_120FPS:
9884             fps = 120;
9885             break;
9886         case CAM_HFR_MODE_150FPS:
9887             fps = 150;
9888             break;
9889         case CAM_HFR_MODE_180FPS:
9890             fps = 180;
9891             break;
9892         case CAM_HFR_MODE_210FPS:
9893             fps = 210;
9894             break;
9895         case CAM_HFR_MODE_240FPS:
9896             fps = 240;
9897             break;
9898         case CAM_HFR_MODE_480FPS:
9899             fps = 480;
9900             break;
9901         case CAM_HFR_MODE_OFF:
9902         case CAM_HFR_MODE_MAX:
9903         default:
9904             break;
9905         }
9906 
9907         /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9908         if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9909             /* For each HFR frame rate, need to advertise one variable fps range
9910              * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9911              * and [120, 120]. While camcorder preview alone is running [30, 120] is
9912              * set by the app. When video recording is started, [120, 120] is
9913              * set. This way sensor configuration does not change when recording
9914              * is started */
9915 
9916             /* (width, height, fps_min, fps_max, batch_size_max) */
9917             for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9918                 j < MAX_SIZES_CNT; j++) {
9919                 available_hfr_configs.add(
9920                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9921                 available_hfr_configs.add(
9922                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9923                 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9924                 available_hfr_configs.add(fps);
9925                 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9926 
9927                 /* (width, height, fps_min, fps_max, batch_size_max) */
9928                 available_hfr_configs.add(
9929                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9930                 available_hfr_configs.add(
9931                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9932                 available_hfr_configs.add(fps);
9933                 available_hfr_configs.add(fps);
9934                 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9935             }
9936        }
9937     }
9938     //Advertise HFR capability only if the property is set
9939     memset(prop, 0, sizeof(prop));
9940     property_get("persist.camera.hal3hfr.enable", prop, "1");
9941     uint8_t hfrEnable = (uint8_t)atoi(prop);
9942 
9943     if(hfrEnable && available_hfr_configs.array()) {
9944         staticInfo.update(
9945                 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9946                 available_hfr_configs.array(), available_hfr_configs.size());
9947     }
9948 
9949     int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9950     staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9951                       &max_jpeg_size, 1);
9952 
9953     uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9954     size_t size = 0;
9955     count = CAM_EFFECT_MODE_MAX;
9956     count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9957     for (size_t i = 0; i < count; i++) {
9958         int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9959                 gCamCapability[cameraId]->supported_effects[i]);
9960         if (NAME_NOT_FOUND != val) {
9961             avail_effects[size] = (uint8_t)val;
9962             size++;
9963         }
9964     }
9965     staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9966                       avail_effects,
9967                       size);
9968 
9969     uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9970     uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9971     size_t supported_scene_modes_cnt = 0;
9972     count = CAM_SCENE_MODE_MAX;
9973     count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9974     for (size_t i = 0; i < count; i++) {
9975         if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9976                 CAM_SCENE_MODE_OFF) {
9977             int val = lookupFwkName(SCENE_MODES_MAP,
9978                     METADATA_MAP_SIZE(SCENE_MODES_MAP),
9979                     gCamCapability[cameraId]->supported_scene_modes[i]);
9980 
9981             if (NAME_NOT_FOUND != val) {
9982                 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9983                 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9984                 supported_scene_modes_cnt++;
9985             }
9986         }
9987     }
9988     staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9989                       avail_scene_modes,
9990                       supported_scene_modes_cnt);
9991 
9992     uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
9993     makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9994                       supported_scene_modes_cnt,
9995                       CAM_SCENE_MODE_MAX,
9996                       scene_mode_overrides,
9997                       supported_indexes,
9998                       cameraId);
9999 
10000     if (supported_scene_modes_cnt == 0) {
10001         supported_scene_modes_cnt = 1;
10002         avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
10003     }
10004 
10005     staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
10006             scene_mode_overrides, supported_scene_modes_cnt * 3);
10007 
10008     uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
10009                                          ANDROID_CONTROL_MODE_AUTO,
10010                                          ANDROID_CONTROL_MODE_USE_SCENE_MODE};
10011     staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
10012             available_control_modes,
10013             3);
10014 
10015     uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
10016     size = 0;
10017     count = CAM_ANTIBANDING_MODE_MAX;
10018     count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
10019     for (size_t i = 0; i < count; i++) {
10020         int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
10021                 gCamCapability[cameraId]->supported_antibandings[i]);
10022         if (NAME_NOT_FOUND != val) {
10023             avail_antibanding_modes[size] = (uint8_t)val;
10024             size++;
10025         }
10026 
10027     }
10028     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
10029                       avail_antibanding_modes,
10030                       size);
10031 
10032     uint8_t avail_abberation_modes[] = {
10033             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
10034             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
10035             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
10036     count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
10037     count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
10038     if (0 == count) {
10039     //  If no aberration correction modes are available for a device, advertise only the OFF mode
10040         size = 1;
10041     } else {
10042         // If count is not zero then at least one of the FAST or HIGH_QUALITY modes is supported.
10043         // So, advertise all 3 modes if at least one mode is supported, as per the
10044         // new M requirement.
10045         size = 3;
10046     }
10047     staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10048             avail_abberation_modes,
10049             size);
10050 
10051     uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
10052     size = 0;
10053     count = CAM_FOCUS_MODE_MAX;
10054     count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
10055     for (size_t i = 0; i < count; i++) {
10056         int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10057                 gCamCapability[cameraId]->supported_focus_modes[i]);
10058         if (NAME_NOT_FOUND != val) {
10059             avail_af_modes[size] = (uint8_t)val;
10060             size++;
10061         }
10062     }
10063     staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
10064                       avail_af_modes,
10065                       size);
10066 
10067     uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
10068     size = 0;
10069     count = CAM_WB_MODE_MAX;
10070     count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
10071     for (size_t i = 0; i < count; i++) {
10072         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10073                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10074                 gCamCapability[cameraId]->supported_white_balances[i]);
10075         if (NAME_NOT_FOUND != val) {
10076             avail_awb_modes[size] = (uint8_t)val;
10077             size++;
10078         }
10079     }
10080     staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
10081                       avail_awb_modes,
10082                       size);
10083 
10084     uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
10085     count = CAM_FLASH_FIRING_LEVEL_MAX;
10086     count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
10087             count);
10088     for (size_t i = 0; i < count; i++) {
10089         available_flash_levels[i] =
10090                 gCamCapability[cameraId]->supported_firing_levels[i];
10091     }
10092     staticInfo.update(ANDROID_FLASH_FIRING_POWER,
10093             available_flash_levels, count);
10094 
10095     uint8_t flashAvailable;
10096     if (gCamCapability[cameraId]->flash_available)
10097         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
10098     else
10099         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
10100     staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
10101             &flashAvailable, 1);
10102 
10103     Vector<uint8_t> avail_ae_modes;
10104     count = CAM_AE_MODE_MAX;
10105     count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
10106     for (size_t i = 0; i < count; i++) {
10107         uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
10108         if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
10109             aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
10110         }
10111         avail_ae_modes.add(aeMode);
10112     }
10113     if (flashAvailable) {
10114         avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
10115         avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
10116     }
10117     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
10118                       avail_ae_modes.array(),
10119                       avail_ae_modes.size());
10120 
10121     int32_t sensitivity_range[2];
10122     sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
10123     sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
10124     staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
10125                       sensitivity_range,
10126                       sizeof(sensitivity_range) / sizeof(int32_t));
10127 
10128     staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10129                       &gCamCapability[cameraId]->max_analog_sensitivity,
10130                       1);
10131 
10132     int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
10133     staticInfo.update(ANDROID_SENSOR_ORIENTATION,
10134                       &sensor_orientation,
10135                       1);
10136 
10137     int32_t max_output_streams[] = {
10138             MAX_STALLING_STREAMS,
10139             MAX_PROCESSED_STREAMS,
10140             MAX_RAW_STREAMS};
10141     staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
10142             max_output_streams,
10143             sizeof(max_output_streams)/sizeof(max_output_streams[0]));
10144 
10145     uint8_t avail_leds = 0;
10146     staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
10147                       &avail_leds, 0);
10148 
10149     uint8_t focus_dist_calibrated;
10150     int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
10151             gCamCapability[cameraId]->focus_dist_calibrated);
10152     if (NAME_NOT_FOUND != val) {
10153         focus_dist_calibrated = (uint8_t)val;
10154         staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10155                      &focus_dist_calibrated, 1);
10156     }
10157 
10158     int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
10159     size = 0;
10160     count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
10161             MAX_TEST_PATTERN_CNT);
10162     for (size_t i = 0; i < count; i++) {
10163         int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
10164                 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
10165         if (NAME_NOT_FOUND != testpatternMode) {
10166             avail_testpattern_modes[size] = testpatternMode;
10167             size++;
10168         }
10169     }
10170     staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10171                       avail_testpattern_modes,
10172                       size);
10173 
10174     uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10175     staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10176                       &max_pipeline_depth,
10177                       1);
10178 
10179     int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10180     staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10181                       &partial_result_count,
10182                        1);
10183 
10184     int32_t max_stall_duration = MAX_REPROCESS_STALL;
10185     staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10186 
10187     Vector<uint8_t> available_capabilities;
10188     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10189     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10190     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10191     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10192     if (supportBurst) {
10193         available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10194     }
10195     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10196     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10197     if (hfrEnable && available_hfr_configs.array()) {
10198         available_capabilities.add(
10199                 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10200     }
10201 
10202     if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10203         available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10204     }
10205     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10206             available_capabilities.array(),
10207             available_capabilities.size());
10208 
10209     // aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
10210     // The assumption is that all Bayer cameras support MANUAL_SENSOR.
10211     uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10212             ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10213 
10214     staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10215             &aeLockAvailable, 1);
10216 
10217     // awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
10218     // BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
10219     uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10220             ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10221 
10222     staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10223             &awbLockAvailable, 1);
10224 
10225     int32_t max_input_streams = 1;
10226     staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10227                       &max_input_streams,
10228                       1);
10229 
10230     /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10231     int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10232             HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10233             HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10234             HAL_PIXEL_FORMAT_YCbCr_420_888};
10235     staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10236                       io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
10237 
10238     int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10239     staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10240                       &max_latency,
10241                       1);
10242 
10243 #ifndef USE_HAL_3_3
10244     int32_t isp_sensitivity_range[2];
10245     isp_sensitivity_range[0] =
10246         gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10247     isp_sensitivity_range[1] =
10248         gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10249     staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10250                       isp_sensitivity_range,
10251                       sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10252 #endif
10253 
10254     uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10255                                            ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10256     staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10257             available_hot_pixel_modes,
10258             sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10259 
10260     uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10261                                          ANDROID_SHADING_MODE_FAST,
10262                                          ANDROID_SHADING_MODE_HIGH_QUALITY};
10263     staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10264                       available_shading_modes,
10265                       3);
10266 
10267     uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10268                                                   ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10269     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10270                       available_lens_shading_map_modes,
10271                       2);
10272 
10273     uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10274                                       ANDROID_EDGE_MODE_FAST,
10275                                       ANDROID_EDGE_MODE_HIGH_QUALITY,
10276                                       ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10277     staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10278             available_edge_modes,
10279             sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10280 
10281     uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10282                                            ANDROID_NOISE_REDUCTION_MODE_FAST,
10283                                            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10284                                            ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10285                                            ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10286     staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10287             available_noise_red_modes,
10288             sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10289 
10290     uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10291                                          ANDROID_TONEMAP_MODE_FAST,
10292                                          ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10293     staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10294             available_tonemap_modes,
10295             sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10296 
10297     uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10298     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10299             available_hot_pixel_map_modes,
10300             sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10301 
10302     val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10303             gCamCapability[cameraId]->reference_illuminant1);
10304     if (NAME_NOT_FOUND != val) {
10305         uint8_t fwkReferenceIlluminant = (uint8_t)val;
10306         staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10307     }
10308 
10309     val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10310             gCamCapability[cameraId]->reference_illuminant2);
10311     if (NAME_NOT_FOUND != val) {
10312         uint8_t fwkReferenceIlluminant = (uint8_t)val;
10313         staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10314     }
10315 
10316     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10317             (void *)gCamCapability[cameraId]->forward_matrix1,
10318             FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10319 
10320     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10321             (void *)gCamCapability[cameraId]->forward_matrix2,
10322             FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10323 
10324     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10325             (void *)gCamCapability[cameraId]->color_transform1,
10326             COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10327 
10328     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10329             (void *)gCamCapability[cameraId]->color_transform2,
10330             COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10331 
10332     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10333             (void *)gCamCapability[cameraId]->calibration_transform1,
10334             CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10335 
10336     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10337             (void *)gCamCapability[cameraId]->calibration_transform2,
10338             CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10339 
10340     int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10341        ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10342        ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10343        ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10344        ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10345        ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10346        ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10347        ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10348        ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10349        ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10350        ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10351        ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10352        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10353        ANDROID_JPEG_GPS_COORDINATES,
10354        ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10355        ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10356        ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10357        ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10358        ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10359        ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10360        ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10361        ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10362        ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10363        ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
10364 #ifndef USE_HAL_3_3
10365        ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10366 #endif
10367        ANDROID_STATISTICS_FACE_DETECT_MODE,
10368        ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10369        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10370        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
10371        ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
10372        QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
10373        QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
10374        QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TEMPORAL_DENOISE_ENABLE,
10375        QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, QCAMERA3_USE_ISO_EXP_PRIORITY,
10376        QCAMERA3_SELECT_PRIORITY, QCAMERA3_USE_SATURATION,
10377        QCAMERA3_EXPOSURE_METER, QCAMERA3_USE_AV_TIMER,
10378        QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
10379        QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
10380        QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
10381        QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
10382        QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
10383        QCAMERA3_JPEG_ENCODE_CROP_ENABLE, QCAMERA3_JPEG_ENCODE_CROP_RECT,
10384        QCAMERA3_JPEG_ENCODE_CROP_ROI, QCAMERA3_VIDEO_HDR_MODE,
10385        QCAMERA3_IR_MODE, QCAMERA3_AEC_CONVERGENCE_SPEED,
10386        QCAMERA3_AWB_CONVERGENCE_SPEED, QCAMERA3_INSTANT_AEC_MODE,
10387        QCAMERA3_SHARPNESS_STRENGTH, QCAMERA3_HISTOGRAM_MODE,
10388        QCAMERA3_BINNING_CORRECTION_MODE,
10389        /* DevCamDebug metadata request_keys_basic */
10390        DEVCAMDEBUG_META_ENABLE,
10391        /* DevCamDebug metadata end */
10392        NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10393        NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10394        TANGO_MODE_DATA_SENSOR_FULLFOV,
10395        NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
10396        NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
10397        NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE
10398        };
10399 
10400     size_t request_keys_cnt =
10401             sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10402     Vector<int32_t> available_request_keys;
10403     available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10404     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10405         available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10406     }
10407 
10408     if (gExposeEnableZslKey) {
10409         available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10410         available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
10411         available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
10412         available_request_keys.add(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS);
10413     }
10414 
10415     staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10416             available_request_keys.array(), available_request_keys.size());
10417 
10418     int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10419        ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10420        ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10421        ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10422        ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10423        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10424        ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10425        ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10426        ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10427        ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10428        ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10429        ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10430        ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10431        ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10432        ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10433        ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10434        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
10435        ANDROID_STATISTICS_FACE_DETECT_MODE,
10436        ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10437        ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10438        ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
10439        ANDROID_STATISTICS_FACE_SCORES,
10440 #ifndef USE_HAL_3_3
10441        ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10442 #endif
10443        NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
10444        NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
10445        QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
10446        QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
10447        QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TUNING_META_DATA_BLOB,
10448        QCAMERA3_TEMPORAL_DENOISE_ENABLE, QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE,
10449        QCAMERA3_EXPOSURE_METER, QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN,
10450        QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
10451        QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
10452        QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
10453        QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
10454        QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB, QCAMERA3_VIDEO_HDR_MODE,
10455        QCAMERA3_IR_MODE, QCAMERA3_AEC_CONVERGENCE_SPEED,
10456        QCAMERA3_AWB_CONVERGENCE_SPEED, QCAMERA3_INSTANT_AEC_MODE,
10457        QCAMERA3_HISTOGRAM_MODE, QCAMERA3_BINNING_CORRECTION_MODE,
10458        QCAMERA3_STATS_IS_HDR_SCENE, QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
10459        QCAMERA3_STATS_BLINK_DETECTED, QCAMERA3_STATS_BLINK_DEGREE,
10460        QCAMERA3_STATS_SMILE_DEGREE, QCAMERA3_STATS_SMILE_CONFIDENCE,
10461        QCAMERA3_STATS_GAZE_ANGLE, QCAMERA3_STATS_GAZE_DIRECTION,
10462        QCAMERA3_STATS_GAZE_DEGREE,
10463        // DevCamDebug metadata result_keys_basic
10464        DEVCAMDEBUG_META_ENABLE,
10465        // DevCamDebug metadata result_keys AF
10466        DEVCAMDEBUG_AF_LENS_POSITION,
10467        DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10468        DEVCAMDEBUG_AF_TOF_DISTANCE,
10469        DEVCAMDEBUG_AF_LUMA,
10470        DEVCAMDEBUG_AF_HAF_STATE,
10471        DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10472        DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10473        DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10474        DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10475        DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10476        DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10477        DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10478        DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10479        DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10480        DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10481        DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10482        DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10483        DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10484        DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10485        DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10486        DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10487        DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10488        DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10489        DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10490        DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10491        DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10492        DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10493        // DevCamDebug metadata result_keys AEC
10494        DEVCAMDEBUG_AEC_TARGET_LUMA,
10495        DEVCAMDEBUG_AEC_COMP_LUMA,
10496        DEVCAMDEBUG_AEC_AVG_LUMA,
10497        DEVCAMDEBUG_AEC_CUR_LUMA,
10498        DEVCAMDEBUG_AEC_LINECOUNT,
10499        DEVCAMDEBUG_AEC_REAL_GAIN,
10500        DEVCAMDEBUG_AEC_EXP_INDEX,
10501        DEVCAMDEBUG_AEC_LUX_IDX,
10502        // DevCamDebug metadata result_keys zzHDR
10503        DEVCAMDEBUG_AEC_L_REAL_GAIN,
10504        DEVCAMDEBUG_AEC_L_LINECOUNT,
10505        DEVCAMDEBUG_AEC_S_REAL_GAIN,
10506        DEVCAMDEBUG_AEC_S_LINECOUNT,
10507        DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10508        DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10509        // DevCamDebug metadata result_keys ADRC
10510        DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10511        DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10512        DEVCAMDEBUG_AEC_GTM_RATIO,
10513        DEVCAMDEBUG_AEC_LTM_RATIO,
10514        DEVCAMDEBUG_AEC_LA_RATIO,
10515        DEVCAMDEBUG_AEC_GAMMA_RATIO,
10516        // DevCamDebug metadata result_keys AEC MOTION
10517        DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10518        DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10519        DEVCAMDEBUG_AEC_SUBJECT_MOTION,
10520        // DevCamDebug metadata result_keys AWB
10521        DEVCAMDEBUG_AWB_R_GAIN,
10522        DEVCAMDEBUG_AWB_G_GAIN,
10523        DEVCAMDEBUG_AWB_B_GAIN,
10524        DEVCAMDEBUG_AWB_CCT,
10525        DEVCAMDEBUG_AWB_DECISION,
10526        /* DevCamDebug metadata end */
10527        NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10528        NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10529        NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
10530        NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
10531        NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
10532        NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
10533        NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE,
10534        NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
10535        NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
10536        NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
10537        NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
10538        NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
10539        NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
10540        NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y
10541        };
10542 
10543     size_t result_keys_cnt =
10544             sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10545 
10546     Vector<int32_t> available_result_keys;
10547     available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10548     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10549         available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10550     }
10551     if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10552         available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10553         available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10554     }
10555     if (supportedFaceDetectMode == 1) {
10556         available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10557         available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10558     } else if ((supportedFaceDetectMode == 2) ||
10559             (supportedFaceDetectMode == 3)) {
10560         available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10561         available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10562     }
10563 #ifndef USE_HAL_3_3
10564     {
10565         available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10566         available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10567     }
10568 #endif
10569 
10570     if (gExposeEnableZslKey) {
10571         available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10572         available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
10573         available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
10574         available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
10575     }
10576 
10577     staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10578             available_result_keys.array(), available_result_keys.size());
10579 
10580     int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
10581        ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10582        ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10583        ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10584        ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10585        ANDROID_SCALER_CROPPING_TYPE,
10586        ANDROID_SYNC_MAX_LATENCY,
10587        ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10588        ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10589        ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10590        ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10591        ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10592        ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10593        ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10594        ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10595        ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10596        ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10597        ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10598        ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10599        ANDROID_LENS_FACING,
10600        ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10601        ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10602        ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10603        ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10604        ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10605        ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10606        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10607        /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10608        ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10609        ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10610        ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10611        ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10612        ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10613        ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10614        ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10615        ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10616        ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10617        ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10618        ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10619        ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
10620        ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
10621        ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10622        ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10623        ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10624        ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10625        ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10626        ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10627        ANDROID_TONEMAP_MAX_CURVE_POINTS,
10628        ANDROID_CONTROL_AVAILABLE_MODES,
10629        ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10630        ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10631        ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10632        ANDROID_SHADING_AVAILABLE_MODES,
10633        ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10634 #ifndef USE_HAL_3_3
10635        ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10636        ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10637 #endif
10638        QCAMERA3_OPAQUE_RAW_FORMAT, QCAMERA3_EXP_TIME_RANGE,
10639        QCAMERA3_SATURATION_RANGE, QCAMERA3_SENSOR_IS_MONO_ONLY,
10640        QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10641        QCAMERA3_SHARPNESS_RANGE,
10642        QCAMERA3_HISTOGRAM_BUCKETS, QCAMERA3_HISTOGRAM_MAX_COUNT,
10643        QCAMERA3_STATS_BSGC_AVAILABLE
10644        };
10645 
10646     available_characteristics_keys.appendArray(characteristics_keys_basic,
10647             sizeof(characteristics_keys_basic)/sizeof(int32_t));
10648 #ifndef USE_HAL_3_3
10649     if (hasBlackRegions) {
10650         available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10651     }
10652 #endif
10653 
10654     if (0 <= indexPD) {
10655         int32_t depthKeys[] = {
10656                 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10657                 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10658                 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10659                 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10660                 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10661         };
10662         available_characteristics_keys.appendArray(depthKeys,
10663                 sizeof(depthKeys) / sizeof(depthKeys[0]));
10664     }
10665 
10666     /*available stall durations depend on the hw + sw and will be different for different devices */
10667     /*have to add for raw after implementation*/
10668     int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10669     size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10670 
10671     Vector<int64_t> available_stall_durations;
10672     for (uint32_t j = 0; j < stall_formats_count; j++) {
10673         if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10674             for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10675                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10676                 available_stall_durations.add(stall_formats[j]);
10677                 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10678                 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10679                 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10680           }
10681         } else {
10682             for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10683                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10684                 available_stall_durations.add(stall_formats[j]);
10685                 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10686                 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10687                 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10688             }
10689         }
10690     }
10691     staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10692                       available_stall_durations.array(),
10693                       available_stall_durations.size());
10694 
10695     //QCAMERA3_OPAQUE_RAW
10696     uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10697     cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10698     switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10699     case LEGACY_RAW:
10700         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10701             fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10702         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10703             fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10704         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10705             fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10706         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10707         break;
10708     case MIPI_RAW:
10709         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10710             fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10711         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10712             fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10713         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10714             fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10715         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10716         break;
10717     default:
10718         LOGE("unknown opaque_raw_format %d",
10719                 gCamCapability[cameraId]->opaque_raw_fmt);
10720         break;
10721     }
10722     staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10723 
10724     Vector<int32_t> strides;
10725     for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10726             gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10727         cam_stream_buf_plane_info_t buf_planes;
10728         strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10729         strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10730         mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10731             &gCamCapability[cameraId]->padding_info, &buf_planes);
10732         strides.add(buf_planes.plane_info.mp[0].stride);
10733     }
10734 
10735     if (!strides.isEmpty()) {
10736         staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10737                 strides.size());
10738         available_characteristics_keys.add(QCAMERA3_OPAQUE_RAW_STRIDES);
10739     }
10740 
10741     //TBD: remove the following line once backend advertises zzHDR in feature mask
10742     gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
10743     //Video HDR default
10744     if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10745             (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
10746             CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
10747         int32_t vhdr_mode[] = {
10748                 QCAMERA3_VIDEO_HDR_MODE_OFF,
10749                 QCAMERA3_VIDEO_HDR_MODE_ON};
10750 
10751         size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10752         staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10753                     vhdr_mode, vhdr_mode_count);
10754         available_characteristics_keys.add(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES);
10755     }
10756 
10757     staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10758             (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10759             sizeof(gCamCapability[cameraId]->related_cam_calibration));
10760 
10761     uint8_t isMonoOnly =
10762             (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10763     staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10764             &isMonoOnly, 1);
10765 
10766 #ifndef USE_HAL_3_3
10767     Vector<int32_t> opaque_size;
10768     for (size_t j = 0; j < scalar_formats_count; j++) {
10769         if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10770             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10771                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10772                 cam_stream_buf_plane_info_t buf_planes;
10773 
10774                 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10775                          &gCamCapability[cameraId]->padding_info, &buf_planes);
10776 
10777                 if (rc == 0) {
10778                     opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10779                     opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10780                     opaque_size.add(buf_planes.plane_info.frame_len);
10781                 }else {
10782                     LOGE("raw frame calculation failed!");
10783                 }
10784             }
10785         }
10786     }
10787 
10788     if ((opaque_size.size() > 0) &&
10789             (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10790         staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10791     else
10792         LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10793 #endif
10794 
10795     if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10796         int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10797         size = 0;
10798         count = CAM_IR_MODE_MAX;
10799         count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10800         for (size_t i = 0; i < count; i++) {
10801             int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10802                     gCamCapability[cameraId]->supported_ir_modes[i]);
10803             if (NAME_NOT_FOUND != val) {
10804                 avail_ir_modes[size] = (int32_t)val;
10805                 size++;
10806             }
10807         }
10808         staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10809                 avail_ir_modes, size);
10810         available_characteristics_keys.add(QCAMERA3_IR_AVAILABLE_MODES);
10811     }
10812 
10813     if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10814         int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10815         size = 0;
10816         count = CAM_AEC_CONVERGENCE_MAX;
10817         count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10818         for (size_t i = 0; i < count; i++) {
10819             int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10820                     gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10821             if (NAME_NOT_FOUND != val) {
10822                 available_instant_aec_modes[size] = (int32_t)val;
10823                 size++;
10824             }
10825         }
10826         staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10827                 available_instant_aec_modes, size);
10828         available_characteristics_keys.add(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES);
10829     }
10830 
10831     int32_t sharpness_range[] = {
10832             gCamCapability[cameraId]->sharpness_ctrl.min_value,
10833             gCamCapability[cameraId]->sharpness_ctrl.max_value};
10834     staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10835 
10836     if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10837         int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10838         size = 0;
10839         count = CAM_BINNING_CORRECTION_MODE_MAX;
10840         count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10841         for (size_t i = 0; i < count; i++) {
10842             int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10843                     METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10844                     gCamCapability[cameraId]->supported_binning_modes[i]);
10845             if (NAME_NOT_FOUND != val) {
10846                 avail_binning_modes[size] = (int32_t)val;
10847                 size++;
10848             }
10849         }
10850         staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10851                 avail_binning_modes, size);
10852         available_characteristics_keys.add(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES);
10853     }
10854 
10855     if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10856         int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10857         size = 0;
10858         count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10859         for (size_t i = 0; i < count; i++) {
10860             int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10861                     gCamCapability[cameraId]->supported_aec_modes[i]);
10862             if (NAME_NOT_FOUND != val)
10863                 available_aec_modes[size++] = val;
10864         }
10865         staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10866                 available_aec_modes, size);
10867         available_characteristics_keys.add(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES);
10868     }
10869 
10870     if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10871         int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10872         size = 0;
10873         count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10874         for (size_t i = 0; i < count; i++) {
10875             int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10876                     gCamCapability[cameraId]->supported_iso_modes[i]);
10877             if (NAME_NOT_FOUND != val)
10878                 available_iso_modes[size++] = val;
10879         }
10880         staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10881                 available_iso_modes, size);
10882         available_characteristics_keys.add(QCAMERA3_ISO_AVAILABLE_MODES);
10883     }
10884 
10885     int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
10886     for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
10887         available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10888     staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10889             available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10890 
10891     int32_t available_saturation_range[4];
10892     available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10893     available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10894     available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10895     available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10896     staticInfo.update(QCAMERA3_SATURATION_RANGE,
10897             available_saturation_range, 4);
10898 
10899     uint8_t is_hdr_values[2];
10900     is_hdr_values[0] = 0;
10901     is_hdr_values[1] = 1;
10902     staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10903             is_hdr_values, 2);
10904 
10905     float is_hdr_confidence_range[2];
10906     is_hdr_confidence_range[0] = 0.0;
10907     is_hdr_confidence_range[1] = 1.0;
10908     staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10909             is_hdr_confidence_range, 2);
10910 
10911     size_t eepromLength = strnlen(
10912             reinterpret_cast<const char *>(
10913                     gCamCapability[cameraId]->eeprom_version_info),
10914             sizeof(gCamCapability[cameraId]->eeprom_version_info));
10915     if (0 < eepromLength) {
10916         char easelInfo[] = ",E:N";
10917         char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10918         if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10919             eepromLength += sizeof(easelInfo);
10920             strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
10921                     gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-ver" : ",E:N"),
10922                     MAX_EEPROM_VERSION_INFO_LEN);
10923         }
10924         staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10925                 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10926         available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO);
10927     }
10928 
10929     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
10930                       available_characteristics_keys.array(),
10931                       available_characteristics_keys.size());
10932 
10933     gStaticMetadata[cameraId] = staticInfo.release();
10934     return rc;
10935 }
10936 
10937 /*===========================================================================
10938  * FUNCTION   : makeTable
10939  *
10940  * DESCRIPTION: make a table of sizes
10941  *
10942  * PARAMETERS :
10943  *
10944  *
10945  *==========================================================================*/
makeTable(cam_dimension_t * dimTable,size_t size,size_t max_size,int32_t * sizeTable)10946 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10947         size_t max_size, int32_t *sizeTable)
10948 {
10949     size_t j = 0;
10950     if (size > max_size) {
10951        size = max_size;
10952     }
10953     for (size_t i = 0; i < size; i++) {
10954         sizeTable[j] = dimTable[i].width;
10955         sizeTable[j+1] = dimTable[i].height;
10956         j+=2;
10957     }
10958 }
10959 
10960 /*===========================================================================
10961  * FUNCTION   : makeFPSTable
10962  *
10963  * DESCRIPTION: make a table of fps ranges
10964  *
10965  * PARAMETERS :
10966  *
10967  *==========================================================================*/
makeFPSTable(cam_fps_range_t * fpsTable,size_t size,size_t max_size,int32_t * fpsRangesTable)10968 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10969         size_t max_size, int32_t *fpsRangesTable)
10970 {
10971     size_t j = 0;
10972     if (size > max_size) {
10973        size = max_size;
10974     }
10975     for (size_t i = 0; i < size; i++) {
10976         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10977         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10978         j+=2;
10979     }
10980 }
10981 
/*===========================================================================
 * FUNCTION   : makeOverridesList
 *
 * DESCRIPTION: make a list of scene mode overrides
 *
 * PARAMETERS :
 *   @overridesTable    : backend table of per-scene-mode overrides
 *   @size              : number of framework-supported scene modes to emit
 *   @max_size          : capacity of overridesList, in (ae, awb, af) triplets
 *   @overridesList     : output array; receives one (ae, awb, af) triplet
 *                        per emitted scene mode
 *   @supported_indexes : for entry i, the index into overridesTable of the
 *                        i-th framework-supported scene mode
 *   @camera_id         : camera whose capabilities are consulted
 *
 *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(
        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
      However we should send the fwk only the overrides for the scene modes
      supported by the framework*/
    size_t j = 0;
    if (size > max_size) {
       size = max_size;
    }
    // Clamp the focus-mode count to the capability array's bounds.
    size_t focus_count = CAM_FOCUS_MODE_MAX;
    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
            focus_count);
    for (size_t i = 0; i < size; i++) {
        bool supt = false;
        size_t index = supported_indexes[i];
        // AE override: advertise auto-flash whenever the camera has a flash unit.
        overridesList[j] = gCamCapability[camera_id]->flash_available ?
                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
        // AWB override: translate the HAL awb mode to the framework enum.
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                overridesTable[index].awb_mode);
        if (NAME_NOT_FOUND != val) {
            overridesList[j+1] = (uint8_t)val;
        }
        // NOTE(review): if the awb lookup fails, overridesList[j+1] is left
        // unwritten for this triplet — presumably the caller zero-initializes
        // the list; confirm at the call site.
        // AF override: only forward the backend's af mode if this sensor
        // actually supports it; otherwise fall back to AF off.
        uint8_t focus_override = overridesTable[index].af_mode;
        for (size_t k = 0; k < focus_count; k++) {
           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
              supt = true;
              break;
           }
        }
        if (supt) {
            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                    focus_override);
            if (NAME_NOT_FOUND != val) {
                overridesList[j+2] = (uint8_t)val;
            }
        } else {
           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        j+=3;
    }
}
11035 
11036 /*===========================================================================
11037  * FUNCTION   : filterJpegSizes
11038  *
11039  * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
11040  *              could be downscaled to
11041  *
11042  * PARAMETERS :
11043  *
11044  * RETURN     : length of jpegSizes array
11045  *==========================================================================*/
11046 
filterJpegSizes(int32_t * jpegSizes,int32_t * processedSizes,size_t processedSizesCnt,size_t maxCount,cam_rect_t active_array_size,uint8_t downscale_factor)11047 size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
11048         size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
11049         uint8_t downscale_factor)
11050 {
11051     if (0 == downscale_factor) {
11052         downscale_factor = 1;
11053     }
11054 
11055     int32_t min_width = active_array_size.width / downscale_factor;
11056     int32_t min_height = active_array_size.height / downscale_factor;
11057     size_t jpegSizesCnt = 0;
11058     if (processedSizesCnt > maxCount) {
11059         processedSizesCnt = maxCount;
11060     }
11061     for (size_t i = 0; i < processedSizesCnt; i+=2) {
11062         if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
11063             jpegSizes[jpegSizesCnt] = processedSizes[i];
11064             jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
11065             jpegSizesCnt += 2;
11066         }
11067     }
11068     return jpegSizesCnt;
11069 }
11070 
11071 /*===========================================================================
11072  * FUNCTION   : computeNoiseModelEntryS
11073  *
11074  * DESCRIPTION: function to map a given sensitivity to the S noise
11075  *              model parameters in the DNG noise model.
11076  *
11077  * PARAMETERS : sens : the sensor sensitivity
11078  *
11079  ** RETURN    : S (sensor amplification) noise
11080  *
11081  *==========================================================================*/
computeNoiseModelEntryS(int32_t sens)11082 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
11083     double s = gCamCapability[mCameraId]->gradient_S * sens +
11084             gCamCapability[mCameraId]->offset_S;
11085     return ((s < 0.0) ? 0.0 : s);
11086 }
11087 
11088 /*===========================================================================
11089  * FUNCTION   : computeNoiseModelEntryO
11090  *
11091  * DESCRIPTION: function to map a given sensitivity to the O noise
11092  *              model parameters in the DNG noise model.
11093  *
11094  * PARAMETERS : sens : the sensor sensitivity
11095  *
11096  ** RETURN    : O (sensor readout) noise
11097  *
11098  *==========================================================================*/
computeNoiseModelEntryO(int32_t sens)11099 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
11100     int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
11101     double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
11102             1.0 : (1.0 * sens / max_analog_sens);
11103     double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
11104             gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
11105     return ((o < 0.0) ? 0.0 : o);
11106 }
11107 
11108 /*===========================================================================
11109  * FUNCTION   : getSensorSensitivity
11110  *
11111  * DESCRIPTION: convert iso_mode to an integer value
11112  *
11113  * PARAMETERS : iso_mode : the iso_mode supported by sensor
11114  *
11115  ** RETURN    : sensitivity supported by sensor
11116  *
11117  *==========================================================================*/
getSensorSensitivity(int32_t iso_mode)11118 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
11119 {
11120     int32_t sensitivity;
11121 
11122     switch (iso_mode) {
11123     case CAM_ISO_MODE_100:
11124         sensitivity = 100;
11125         break;
11126     case CAM_ISO_MODE_200:
11127         sensitivity = 200;
11128         break;
11129     case CAM_ISO_MODE_400:
11130         sensitivity = 400;
11131         break;
11132     case CAM_ISO_MODE_800:
11133         sensitivity = 800;
11134         break;
11135     case CAM_ISO_MODE_1600:
11136         sensitivity = 1600;
11137         break;
11138     default:
11139         sensitivity = -1;
11140         break;
11141     }
11142     return sensitivity;
11143 }
11144 
initHdrPlusClientLocked()11145 int QCamera3HardwareInterface::initHdrPlusClientLocked() {
11146     if (gEaselManagerClient == nullptr) {
11147         gEaselManagerClient = EaselManagerClient::create();
11148         if (gEaselManagerClient == nullptr) {
11149             ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
11150             return -ENODEV;
11151         }
11152     }
11153 
11154     if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
11155         // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
11156         //  to connect to Easel.
11157         bool doNotpowerOnEasel =
11158                 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
11159 
11160         if (doNotpowerOnEasel) {
11161             ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
11162             return OK;
11163         }
11164 
11165         // If Easel is present, power on Easel and suspend it immediately.
11166         status_t res = gEaselManagerClient->open();
11167         if (res != OK) {
11168             ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res),
11169                     res);
11170             return res;
11171         }
11172 
11173         EaselManagerClientOpened = true;
11174 
11175         res = gEaselManagerClient->suspend();
11176         if (res != OK) {
11177             ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
11178         }
11179 
11180         gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
11181         gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
11182 
11183         // Expose enableZsl key only when HDR+ mode is enabled.
11184         gExposeEnableZslKey = !gEaselBypassOnly;
11185     }
11186 
11187     return OK;
11188 }
11189 
/*===========================================================================
 * FUNCTION   : getCamInfo
 *
 * DESCRIPTION: query camera capabilities
 *
 * PARAMETERS :
 *   @cameraId  : camera Id
 *   @info      : camera info struct to be filled in with camera capabilities
 *
 * RETURN     : int type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
    int rc = 0;

    // Serialize against other callers that touch the global per-camera
    // capability and static metadata tables.
    pthread_mutex_lock(&gCamLock);

    {
        // Scoped block so gHdrPlusClientLock is released before the heavier
        // capability/metadata initialization below.
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        rc = initHdrPlusClientLocked();
        if (rc != OK) {
            ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            // Must drop gCamLock on every early-return path.
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Lazily query backend capabilities once per camera id; cached globally.
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Lazily build the static metadata once per camera id; cached globally.
    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Map the backend sensor position (including aux sensors) onto the
    // framework's two-valued facing enum.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
    case CAM_POSITION_BACK_AUX:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
    case CAM_POSITION_FRONT_AUX:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        LOGE("Unknown position type %d for camera id:%d",
                gCamCapability[cameraId]->position, cameraId);
        // Note: on an unknown position the remaining fields are still
        // populated, but rc = -1 is returned to the caller.
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
#ifndef USE_HAL_3_3
    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
11288 
11289 /*===========================================================================
11290  * FUNCTION   : translateCapabilityToMetadata
11291  *
11292  * DESCRIPTION: translate the capability into camera_metadata_t
11293  *
11294  * PARAMETERS : type of the request
11295  *
11296  *
11297  * RETURN     : success: camera_metadata_t*
11298  *              failure: NULL
11299  *
11300  *==========================================================================*/
translateCapabilityToMetadata(int type)11301 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11302 {
11303     if (mDefaultMetadata[type] != NULL) {
11304         return mDefaultMetadata[type];
11305     }
11306     //first time we are handling this request
11307     //fill up the metadata structure using the wrapper class
11308     CameraMetadata settings;
11309     //translate from cam_capability_t to camera_metadata_tag_t
11310     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11311     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11312     int32_t defaultRequestID = 0;
11313     settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11314 
11315     /* OIS disable */
11316     char ois_prop[PROPERTY_VALUE_MAX];
11317     memset(ois_prop, 0, sizeof(ois_prop));
11318     property_get("persist.camera.ois.disable", ois_prop, "0");
11319     uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11320 
11321     /* Force video to use OIS */
11322     char videoOisProp[PROPERTY_VALUE_MAX];
11323     memset(videoOisProp, 0, sizeof(videoOisProp));
11324     property_get("persist.camera.ois.video", videoOisProp, "1");
11325     uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
11326 
11327     // Hybrid AE enable/disable
11328     char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11329     memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11330     property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
11331     uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
11332 
11333     uint8_t controlIntent = 0;
11334     uint8_t focusMode;
11335     uint8_t vsMode;
11336     uint8_t optStabMode;
11337     uint8_t cacMode;
11338     uint8_t edge_mode;
11339     uint8_t noise_red_mode;
11340     uint8_t tonemap_mode;
11341     bool highQualityModeEntryAvailable = FALSE;
11342     bool fastModeEntryAvailable = FALSE;
11343     uint8_t histogramEnable = false;
11344     vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11345     optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11346     uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
11347     uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
11348     uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
11349 
11350     switch (type) {
11351       case CAMERA3_TEMPLATE_PREVIEW:
11352         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11353         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11354         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11355         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11356         edge_mode = ANDROID_EDGE_MODE_FAST;
11357         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11358         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11359         break;
11360       case CAMERA3_TEMPLATE_STILL_CAPTURE:
11361         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11362         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11363         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11364         edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11365         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11366         tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11367         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11368         // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11369         for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11370             if (gCamCapability[mCameraId]->aberration_modes[i] ==
11371                     CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11372                 highQualityModeEntryAvailable = TRUE;
11373             } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11374                     CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11375                 fastModeEntryAvailable = TRUE;
11376             }
11377         }
11378         if (highQualityModeEntryAvailable) {
11379             cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11380         } else if (fastModeEntryAvailable) {
11381             cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11382         }
11383         if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11384             shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11385         }
11386         enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
11387         break;
11388       case CAMERA3_TEMPLATE_VIDEO_RECORD:
11389         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11390         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11391         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11392         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11393         edge_mode = ANDROID_EDGE_MODE_FAST;
11394         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11395         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11396         if (forceVideoOis)
11397             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11398         break;
11399       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11400         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11401         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11402         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11403         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11404         edge_mode = ANDROID_EDGE_MODE_FAST;
11405         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11406         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11407         if (forceVideoOis)
11408             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11409         break;
11410       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11411         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11412         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11413         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11414         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11415         edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11416         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11417         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11418         break;
11419       case CAMERA3_TEMPLATE_MANUAL:
11420         edge_mode = ANDROID_EDGE_MODE_FAST;
11421         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11422         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11423         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11424         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11425         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11426         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11427         break;
11428       default:
11429         edge_mode = ANDROID_EDGE_MODE_FAST;
11430         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11431         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11432         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11433         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11434         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11435         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11436         break;
11437     }
11438     // Set CAC to OFF if underlying device doesn't support
11439     if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11440         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11441     }
11442     settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11443     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11444     settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11445     if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11446         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11447     }
11448     settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
11449     settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
11450     settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
11451 
11452     if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11453             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11454         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11455     else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11456             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11457             || ois_disable)
11458         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11459     settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
11460     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
11461 
11462     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11463             &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11464 
11465     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11466     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11467 
11468     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11469     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11470 
11471     static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11472     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11473 
11474     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11475     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11476 
11477     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11478     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11479 
11480     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11481     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11482 
11483     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11484     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11485 
11486     /*flash*/
11487     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11488     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11489 
11490     static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11491     settings.update(ANDROID_FLASH_FIRING_POWER,
11492             &flashFiringLevel, 1);
11493 
11494     /* lens */
11495     float default_aperture = gCamCapability[mCameraId]->apertures[0];
11496     settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11497 
11498     if (gCamCapability[mCameraId]->filter_densities_count) {
11499         float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11500         settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11501                         gCamCapability[mCameraId]->filter_densities_count);
11502     }
11503 
11504     float default_focal_length = gCamCapability[mCameraId]->focal_length;
11505     settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11506 
11507     static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11508     settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11509 
11510     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11511     settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11512 
11513     static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11514     settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11515 
11516     /* face detection (default to OFF) */
11517     static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11518     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11519 
11520     static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11521     settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
11522 
11523     static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11524     settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11525 
11526     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11527     settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11528 
11529 
11530     static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11531     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11532 
11533     /* Exposure time(Update the Min Exposure Time)*/
11534     int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11535     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11536 
11537     /* frame duration */
11538     static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11539     settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11540 
11541     /* sensitivity */
11542     static const int32_t default_sensitivity = 100;
11543     settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
11544 #ifndef USE_HAL_3_3
11545     static const int32_t default_isp_sensitivity =
11546             gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11547     settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11548 #endif
11549 
11550     /*edge mode*/
11551     settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11552 
11553     /*noise reduction mode*/
11554     settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11555 
11556     /*color correction mode*/
11557     static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11558     settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11559 
11560     /*transform matrix mode*/
11561     settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11562 
11563     int32_t scaler_crop_region[4];
11564     scaler_crop_region[0] = 0;
11565     scaler_crop_region[1] = 0;
11566     scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11567     scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11568     settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11569 
11570     static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11571     settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11572 
11573     /*focus distance*/
11574     float focus_distance = 0.0;
11575     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11576 
11577     /*target fps range: use maximum range for picture, and maximum fixed range for video*/
11578     /* Restrict template max_fps to 30 */
11579     float max_range = 0.0;
11580     float max_fixed_fps = 0.0;
11581     int32_t fps_range[2] = {0, 0};
11582     for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11583             i++) {
11584         if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11585                 TEMPLATE_MAX_PREVIEW_FPS) {
11586             continue;
11587         }
11588         float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11589             gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11590         if (type == CAMERA3_TEMPLATE_PREVIEW ||
11591                 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11592                 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11593             if (range > max_range) {
11594                 fps_range[0] =
11595                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11596                 fps_range[1] =
11597                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11598                 max_range = range;
11599             }
11600         } else {
11601             if (range < 0.01 && max_fixed_fps <
11602                     gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11603                 fps_range[0] =
11604                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11605                 fps_range[1] =
11606                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11607                 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11608             }
11609         }
11610     }
11611     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11612 
11613     /*precapture trigger*/
11614     uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11615     settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11616 
11617     /*af trigger*/
11618     uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11619     settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11620 
11621     /* ae & af regions */
11622     int32_t active_region[] = {
11623             gCamCapability[mCameraId]->active_array_size.left,
11624             gCamCapability[mCameraId]->active_array_size.top,
11625             gCamCapability[mCameraId]->active_array_size.left +
11626                     gCamCapability[mCameraId]->active_array_size.width,
11627             gCamCapability[mCameraId]->active_array_size.top +
11628                     gCamCapability[mCameraId]->active_array_size.height,
11629             0};
11630     settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11631             sizeof(active_region) / sizeof(active_region[0]));
11632     settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11633             sizeof(active_region) / sizeof(active_region[0]));
11634 
11635     /* black level lock */
11636     uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11637     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11638 
11639     //special defaults for manual template
11640     if (type == CAMERA3_TEMPLATE_MANUAL) {
11641         static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11642         settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11643 
11644         static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11645         settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11646 
11647         static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11648         settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11649 
11650         static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11651         settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11652 
11653         static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11654         settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11655 
11656         static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11657         settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11658     }
11659 
11660 
11661     /* TNR
11662      * We'll use this location to determine which modes TNR will be set.
11663      * We will enable TNR to be on if either of the Preview/Video stream requires TNR
11664      * This is not to be confused with linking on a per stream basis that decision
11665      * is still on per-session basis and will be handled as part of config stream
11666      */
11667     uint8_t tnr_enable = 0;
11668 
11669     if (m_bTnrPreview || m_bTnrVideo) {
11670 
11671         switch (type) {
11672             case CAMERA3_TEMPLATE_VIDEO_RECORD:
11673                     tnr_enable = 1;
11674                     break;
11675 
11676             default:
11677                     tnr_enable = 0;
11678                     break;
11679         }
11680 
11681         int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11682         settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11683         settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11684 
11685         LOGD("TNR:%d with process plate %d for template:%d",
11686                              tnr_enable, tnr_process_type, type);
11687     }
11688 
11689     //Update Link tags to default
11690     uint8_t sync_type = CAM_TYPE_STANDALONE;
11691     settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11692 
11693     uint8_t is_main = 1;
11694     settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11695 
11696     uint8_t related_camera_id = mCameraId;
11697     settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
11698 
11699     /* CDS default */
11700     char prop[PROPERTY_VALUE_MAX];
11701     memset(prop, 0, sizeof(prop));
11702     property_get("persist.camera.CDS", prop, "Auto");
11703     cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11704     cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11705     if (CAM_CDS_MODE_MAX == cds_mode) {
11706         cds_mode = CAM_CDS_MODE_AUTO;
11707     }
11708 
11709     /* Disabling CDS in templates which have TNR enabled*/
11710     if (tnr_enable)
11711         cds_mode = CAM_CDS_MODE_OFF;
11712 
11713     int32_t mode = cds_mode;
11714     settings.update(QCAMERA3_CDS_MODE, &mode, 1);
11715 
11716     /* Manual Convergence AEC Speed is disabled by default*/
11717     float default_aec_speed = 0;
11718     settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11719 
11720     /* Manual Convergence AWB Speed is disabled by default*/
11721     float default_awb_speed = 0;
11722     settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11723 
11724     // Set instant AEC to normal convergence by default
11725     int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11726     settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11727 
11728     if (gExposeEnableZslKey) {
11729         settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11730         int32_t postview = 0;
11731         settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
11732         int32_t continuousZslCapture = 0;
11733         settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
11734         // Disable HDR+ for templates other than CAMERA3_TEMPLATE_STILL_CAPTURE and
11735         // CAMERA3_TEMPLATE_PREVIEW.
11736         int32_t disableHdrplus = (type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11737                                   type == CAMERA3_TEMPLATE_PREVIEW) ? 0 : 1;
11738         settings.update(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS, &disableHdrplus, 1);
11739 
11740         // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
11741         // hybrid ae is enabled for 3rd party app HDR+.
11742         if (type == CAMERA3_TEMPLATE_PREVIEW ||
11743                 type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
11744             hybrid_ae = 1;
11745         }
11746     }
11747     /* hybrid ae */
11748     settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11749 
11750     mDefaultMetadata[type] = settings.release();
11751 
11752     return mDefaultMetadata[type];
11753 }
11754 
11755 /*===========================================================================
11756  * FUNCTION   : getExpectedFrameDuration
11757  *
11758  * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
11759  *              duration
11760  *
11761  * PARAMETERS :
11762  *   @request   : request settings
11763  *   @frameDuration : The maximum frame duration in nanoseconds
11764  *
11765  * RETURN     : None
11766  *==========================================================================*/
getExpectedFrameDuration(const camera_metadata_t * request,nsecs_t * frameDuration)11767 void QCamera3HardwareInterface::getExpectedFrameDuration(
11768         const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11769     if (nullptr == frameDuration) {
11770         return;
11771     }
11772 
11773     camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11774     find_camera_metadata_ro_entry(request,
11775             ANDROID_SENSOR_EXPOSURE_TIME,
11776             &e);
11777     if (e.count > 0) {
11778         *frameDuration = e.data.i64[0];
11779     }
11780     find_camera_metadata_ro_entry(request,
11781             ANDROID_SENSOR_FRAME_DURATION,
11782             &e);
11783     if (e.count > 0) {
11784         *frameDuration = std::max(e.data.i64[0], *frameDuration);
11785     }
11786 }
11787 
11788 /*===========================================================================
11789  * FUNCTION   : calculateMaxExpectedDuration
11790  *
11791  * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11792  *              current camera settings.
11793  *
11794  * PARAMETERS :
11795  *   @request   : request settings
11796  *
11797  * RETURN     : Expected frame duration in nanoseconds.
11798  *==========================================================================*/
calculateMaxExpectedDuration(const camera_metadata_t * request)11799 nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11800         const camera_metadata_t *request) {
11801     nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11802     camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11803     find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11804     if (e.count == 0) {
11805         return maxExpectedDuration;
11806     }
11807 
11808     if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11809         getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11810     }
11811 
11812     if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11813         return maxExpectedDuration;
11814     }
11815 
11816     find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11817     if (e.count == 0) {
11818         return maxExpectedDuration;
11819     }
11820 
11821     switch (e.data.u8[0]) {
11822         case ANDROID_CONTROL_AE_MODE_OFF:
11823             getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11824             break;
11825         default:
11826             find_camera_metadata_ro_entry(request,
11827                     ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11828                     &e);
11829             if (e.count > 1) {
11830                 maxExpectedDuration = 1e9 / e.data.u8[0];
11831             }
11832             break;
11833     }
11834 
11835     return maxExpectedDuration;
11836 }
11837 
11838 /*===========================================================================
11839  * FUNCTION   : setFrameParameters
11840  *
11841  * DESCRIPTION: set parameters per frame as requested in the metadata from
11842  *              framework
11843  *
11844  * PARAMETERS :
11845  *   @request   : request that needs to be serviced
11846  *   @streamsArray : Stream ID of all the requested streams
11847  *   @blob_request: Whether this request is a blob request or not
11848  *
11849  * RETURN     : success: NO_ERROR
11850  *              failure:
11851  *==========================================================================*/
setFrameParameters(camera3_capture_request_t * request,cam_stream_ID_t streamsArray,int blob_request,uint32_t snapshotStreamId)11852 int QCamera3HardwareInterface::setFrameParameters(
11853                     camera3_capture_request_t *request,
11854                     cam_stream_ID_t streamsArray,
11855                     int blob_request,
11856                     uint32_t snapshotStreamId)
11857 {
11858     /*translate from camera_metadata_t type to parm_type_t*/
11859     int rc = 0;
11860     int32_t hal_version = CAM_HAL_V3;
11861 
11862     clear_metadata_buffer(mParameters);
11863     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11864         LOGE("Failed to set hal version in the parameters");
11865         return BAD_VALUE;
11866     }
11867 
11868     /*we need to update the frame number in the parameters*/
11869     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11870             request->frame_number)) {
11871         LOGE("Failed to set the frame number in the parameters");
11872         return BAD_VALUE;
11873     }
11874 
11875     /* Update stream id of all the requested buffers */
11876     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
11877         LOGE("Failed to set stream type mask in the parameters");
11878         return BAD_VALUE;
11879     }
11880 
11881     if (mUpdateDebugLevel) {
11882         uint32_t dummyDebugLevel = 0;
11883         /* The value of dummyDebugLevel is irrelavent. On
11884          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11885         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11886                 dummyDebugLevel)) {
11887             LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11888             return BAD_VALUE;
11889         }
11890         mUpdateDebugLevel = false;
11891     }
11892 
11893     if(request->settings != NULL){
11894         mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
11895         rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11896         if (blob_request)
11897             memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11898     }
11899 
11900     return rc;
11901 }
11902 
11903 /*===========================================================================
11904  * FUNCTION   : setReprocParameters
11905  *
11906  * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11907  *              return it.
11908  *
11909  * PARAMETERS :
11910  *   @request   : request that needs to be serviced
11911  *
11912  * RETURN     : success: NO_ERROR
11913  *              failure:
11914  *==========================================================================*/
setReprocParameters(camera3_capture_request_t * request,metadata_buffer_t * reprocParam,uint32_t snapshotStreamId)11915 int32_t QCamera3HardwareInterface::setReprocParameters(
11916         camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11917         uint32_t snapshotStreamId)
11918 {
11919     /*translate from camera_metadata_t type to parm_type_t*/
11920     int rc = 0;
11921 
11922     if (NULL == request->settings){
11923         LOGE("Reprocess settings cannot be NULL");
11924         return BAD_VALUE;
11925     }
11926 
11927     if (NULL == reprocParam) {
11928         LOGE("Invalid reprocessing metadata buffer");
11929         return BAD_VALUE;
11930     }
11931     clear_metadata_buffer(reprocParam);
11932 
11933     /*we need to update the frame number in the parameters*/
11934     if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11935             request->frame_number)) {
11936         LOGE("Failed to set the frame number in the parameters");
11937         return BAD_VALUE;
11938     }
11939 
11940     rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11941     if (rc < 0) {
11942         LOGE("Failed to translate reproc request");
11943         return rc;
11944     }
11945 
11946     CameraMetadata frame_settings;
11947     frame_settings = request->settings;
11948     if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11949             frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11950         int32_t *crop_count =
11951                 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11952         int32_t *crop_data =
11953                 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11954         int32_t *roi_map =
11955                 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11956         if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11957             cam_crop_data_t crop_meta;
11958             memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11959             crop_meta.num_of_streams = 1;
11960             crop_meta.crop_info[0].crop.left   = crop_data[0];
11961             crop_meta.crop_info[0].crop.top    = crop_data[1];
11962             crop_meta.crop_info[0].crop.width  = crop_data[2];
11963             crop_meta.crop_info[0].crop.height = crop_data[3];
11964 
11965             crop_meta.crop_info[0].roi_map.left =
11966                     roi_map[0];
11967             crop_meta.crop_info[0].roi_map.top =
11968                     roi_map[1];
11969             crop_meta.crop_info[0].roi_map.width =
11970                     roi_map[2];
11971             crop_meta.crop_info[0].roi_map.height =
11972                     roi_map[3];
11973 
11974             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11975                 rc = BAD_VALUE;
11976             }
11977             LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11978                     request->input_buffer->stream,
11979                     crop_meta.crop_info[0].crop.left,
11980                     crop_meta.crop_info[0].crop.top,
11981                     crop_meta.crop_info[0].crop.width,
11982                     crop_meta.crop_info[0].crop.height);
11983             LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11984                     request->input_buffer->stream,
11985                     crop_meta.crop_info[0].roi_map.left,
11986                     crop_meta.crop_info[0].roi_map.top,
11987                     crop_meta.crop_info[0].roi_map.width,
11988                     crop_meta.crop_info[0].roi_map.height);
11989             } else {
11990                 LOGE("Invalid reprocess crop count %d!", *crop_count);
11991             }
11992     } else {
11993         LOGE("No crop data from matching output stream");
11994     }
11995 
11996     /* These settings are not needed for regular requests so handle them specially for
11997        reprocess requests; information needed for EXIF tags */
11998     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11999         int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12000                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12001         if (NAME_NOT_FOUND != val) {
12002             uint32_t flashMode = (uint32_t)val;
12003             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
12004                 rc = BAD_VALUE;
12005             }
12006         } else {
12007             LOGE("Could not map fwk flash mode %d to correct hal flash mode",
12008                     frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12009         }
12010     } else {
12011         LOGH("No flash mode in reprocess settings");
12012     }
12013 
12014     if (frame_settings.exists(ANDROID_FLASH_STATE)) {
12015         int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
12016         if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
12017             rc = BAD_VALUE;
12018         }
12019     } else {
12020         LOGH("No flash state in reprocess settings");
12021     }
12022 
12023     if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
12024         uint8_t *reprocessFlags =
12025             frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
12026         if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
12027                 *reprocessFlags)) {
12028                 rc = BAD_VALUE;
12029         }
12030     }
12031 
12032     // Add exif debug data to internal metadata
12033     if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
12034         mm_jpeg_debug_exif_params_t *debug_params =
12035                 (mm_jpeg_debug_exif_params_t *)frame_settings.find
12036                 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
12037         // AE
12038         if (debug_params->ae_debug_params_valid == TRUE) {
12039             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
12040                     debug_params->ae_debug_params);
12041         }
12042         // AWB
12043         if (debug_params->awb_debug_params_valid == TRUE) {
12044             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
12045                 debug_params->awb_debug_params);
12046         }
12047         // AF
12048        if (debug_params->af_debug_params_valid == TRUE) {
12049             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
12050                    debug_params->af_debug_params);
12051         }
12052         // ASD
12053         if (debug_params->asd_debug_params_valid == TRUE) {
12054             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
12055                     debug_params->asd_debug_params);
12056         }
12057         // Stats
12058         if (debug_params->stats_debug_params_valid == TRUE) {
12059             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
12060                     debug_params->stats_debug_params);
12061        }
12062         // BE Stats
12063         if (debug_params->bestats_debug_params_valid == TRUE) {
12064             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
12065                     debug_params->bestats_debug_params);
12066         }
12067         // BHIST
12068         if (debug_params->bhist_debug_params_valid == TRUE) {
12069             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
12070                     debug_params->bhist_debug_params);
12071        }
12072         // 3A Tuning
12073         if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
12074             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
12075                     debug_params->q3a_tuning_debug_params);
12076         }
12077     }
12078 
12079     // Add metadata which reprocess needs
12080     if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
12081         cam_reprocess_info_t *repro_info =
12082                 (cam_reprocess_info_t *)frame_settings.find
12083                 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
12084         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
12085                 repro_info->sensor_crop_info);
12086         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
12087                 repro_info->camif_crop_info);
12088         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
12089                 repro_info->isp_crop_info);
12090         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
12091                 repro_info->cpp_crop_info);
12092         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
12093                 repro_info->af_focal_length_ratio);
12094         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
12095                 repro_info->pipeline_flip);
12096         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
12097                 repro_info->af_roi);
12098         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
12099                 repro_info->dyn_mask);
12100         /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
12101            CAM_INTF_PARM_ROTATION metadata then has been added in
12102            translateToHalMetadata. HAL need to keep this new rotation
12103            metadata. Otherwise, the old rotation info saved in the vendor tag
12104            would be used */
12105         IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
12106                 CAM_INTF_PARM_ROTATION, reprocParam) {
12107             LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
12108         } else {
12109             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
12110                     repro_info->rotation_info);
12111         }
12112     }
12113 
12114     /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
12115        to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
12116        roi.width and roi.height would be the final JPEG size.
12117        For now, HAL only checks this for reprocess request */
12118     if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
12119             frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
12120         uint8_t *enable =
12121             frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
12122         if (*enable == TRUE) {
12123             int32_t *crop_data =
12124                     frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
12125             cam_stream_crop_info_t crop_meta;
12126             memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
12127             crop_meta.stream_id = 0;
12128             crop_meta.crop.left   = crop_data[0];
12129             crop_meta.crop.top    = crop_data[1];
12130             crop_meta.crop.width  = crop_data[2];
12131             crop_meta.crop.height = crop_data[3];
12132             // The JPEG crop roi should match cpp output size
12133             IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
12134                     CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
12135                 crop_meta.roi_map.left = 0;
12136                 crop_meta.roi_map.top = 0;
12137                 crop_meta.roi_map.width = cpp_crop->crop.width;
12138                 crop_meta.roi_map.height = cpp_crop->crop.height;
12139             }
12140             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
12141                     crop_meta);
12142             LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
12143                     crop_meta.crop.left, crop_meta.crop.top,
12144                     crop_meta.crop.width, crop_meta.crop.height, mCameraId);
12145             LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
12146                     crop_meta.roi_map.left, crop_meta.roi_map.top,
12147                     crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
12148 
12149             // Add JPEG scale information
12150             cam_dimension_t scale_dim;
12151             memset(&scale_dim, 0, sizeof(cam_dimension_t));
12152             if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
12153                 int32_t *roi =
12154                     frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
12155                 scale_dim.width = roi[2];
12156                 scale_dim.height = roi[3];
12157                 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
12158                     scale_dim);
12159                 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
12160                     scale_dim.width, scale_dim.height, mCameraId);
12161             }
12162         }
12163     }
12164 
12165     return rc;
12166 }
12167 
12168 /*===========================================================================
12169  * FUNCTION   : saveRequestSettings
12170  *
12171  * DESCRIPTION: Add any settings that might have changed to the request settings
12172  *              and save the settings to be applied on the frame
12173  *
12174  * PARAMETERS :
12175  *   @jpegMetadata : the extracted and/or modified jpeg metadata
12176  *   @request      : request with initial settings
12177  *
12178  * RETURN     :
12179  * camera_metadata_t* : pointer to the saved request settings
12180  *==========================================================================*/
saveRequestSettings(const CameraMetadata & jpegMetadata,camera3_capture_request_t * request)12181 camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
12182         const CameraMetadata &jpegMetadata,
12183         camera3_capture_request_t *request)
12184 {
12185     camera_metadata_t *resultMetadata;
12186     CameraMetadata camMetadata;
12187     camMetadata = request->settings;
12188 
12189     if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12190         int32_t thumbnail_size[2];
12191         thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12192         thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12193         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
12194                 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
12195     }
12196 
12197     if (request->input_buffer != NULL) {
12198         uint8_t reprocessFlags = 1;
12199         camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
12200                 (uint8_t*)&reprocessFlags,
12201                 sizeof(reprocessFlags));
12202     }
12203 
12204     resultMetadata = camMetadata.release();
12205     return resultMetadata;
12206 }
12207 
12208 /*===========================================================================
12209  * FUNCTION   : setHalFpsRange
12210  *
12211  * DESCRIPTION: set FPS range parameter
12212  *
12213  *
12214  * PARAMETERS :
12215  *   @settings    : Metadata from framework
12216  *   @hal_metadata: Metadata buffer
12217  *
12218  *
12219  * RETURN     : success: NO_ERROR
12220  *              failure:
12221  *==========================================================================*/
setHalFpsRange(const CameraMetadata & settings,metadata_buffer_t * hal_metadata)12222 int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
12223         metadata_buffer_t *hal_metadata)
12224 {
12225     int32_t rc = NO_ERROR;
12226     cam_fps_range_t fps_range;
12227     fps_range.min_fps = (float)
12228             settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
12229     fps_range.max_fps = (float)
12230             settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
12231     fps_range.video_min_fps = fps_range.min_fps;
12232     fps_range.video_max_fps = fps_range.max_fps;
12233 
12234     LOGD("aeTargetFpsRange fps: [%f %f]",
12235             fps_range.min_fps, fps_range.max_fps);
12236     /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
12237      * follows:
12238      * ---------------------------------------------------------------|
12239      *      Video stream is absent in configure_streams               |
12240      *    (Camcorder preview before the first video record            |
12241      * ---------------------------------------------------------------|
12242      * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12243      *                   |             |             | vid_min/max_fps|
12244      * ---------------------------------------------------------------|
12245      *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
12246      *                   |-------------|-------------|----------------|
12247      *                   |  [240, 240] |     240     |  [240, 240]    |
12248      * ---------------------------------------------------------------|
12249      *     Video stream is present in configure_streams               |
12250      * ---------------------------------------------------------------|
12251      * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12252      *                   |             |             | vid_min/max_fps|
12253      * ---------------------------------------------------------------|
12254      *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
12255      * (camcorder prev   |-------------|-------------|----------------|
12256      *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
12257      *  is stopped)      |             |             |                |
12258      * ---------------------------------------------------------------|
12259      *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
12260      *                   |-------------|-------------|----------------|
12261      *                   |  [240, 240] |     240     |  [240, 240]    |
12262      * ---------------------------------------------------------------|
12263      * When Video stream is absent in configure_streams,
12264      * preview fps = sensor_fps / batchsize
12265      * Eg: for 240fps at batchSize 4, preview = 60fps
12266      *     for 120fps at batchSize 4, preview = 30fps
12267      *
12268      * When video stream is present in configure_streams, preview fps is as per
12269      * the ratio of preview buffers to video buffers requested in process
12270      * capture request
12271      */
12272     mBatchSize = 0;
12273     if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12274         fps_range.min_fps = fps_range.video_max_fps;
12275         fps_range.video_min_fps = fps_range.video_max_fps;
12276         int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12277                 fps_range.max_fps);
12278         if (NAME_NOT_FOUND != val) {
12279             cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12280             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12281                 return BAD_VALUE;
12282             }
12283 
12284             if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12285                 /* If batchmode is currently in progress and the fps changes,
12286                  * set the flag to restart the sensor */
12287                 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12288                         (mHFRVideoFps != fps_range.max_fps)) {
12289                     mNeedSensorRestart = true;
12290                 }
12291                 mHFRVideoFps = fps_range.max_fps;
12292                 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12293                 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12294                     mBatchSize = MAX_HFR_BATCH_SIZE;
12295                 }
12296              }
12297             LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12298 
12299          }
12300     } else {
12301         /* HFR mode is session param in backend/ISP. This should be reset when
12302          * in non-HFR mode  */
12303         cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12304         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12305             return BAD_VALUE;
12306         }
12307     }
12308     if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12309         return BAD_VALUE;
12310     }
12311     LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12312             fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12313     return rc;
12314 }
12315 
12316 /*===========================================================================
12317  * FUNCTION   : translateToHalMetadata
12318  *
12319  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12320  *
12321  *
12322  * PARAMETERS :
12323  *   @request  : request sent from framework
12324  *
12325  *
12326  * RETURN     : success: NO_ERROR
12327  *              failure:
12328  *==========================================================================*/
translateToHalMetadata(const camera3_capture_request_t * request,metadata_buffer_t * hal_metadata,uint32_t snapshotStreamId)12329 int QCamera3HardwareInterface::translateToHalMetadata
12330                                   (const camera3_capture_request_t *request,
12331                                    metadata_buffer_t *hal_metadata,
12332                                    uint32_t snapshotStreamId) {
12333     if (request == nullptr || hal_metadata == nullptr) {
12334         return BAD_VALUE;
12335     }
12336 
12337     int64_t minFrameDuration = getMinFrameDuration(request);
12338 
12339     return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12340             minFrameDuration);
12341 }
12342 
translateFwkMetadataToHalMetadata(const camera_metadata_t * frameworkMetadata,metadata_buffer_t * hal_metadata,uint32_t snapshotStreamId,int64_t minFrameDuration)12343 int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12344         const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12345         uint32_t snapshotStreamId, int64_t minFrameDuration) {
12346 
12347     int rc = 0;
12348     CameraMetadata frame_settings;
12349     frame_settings = frameworkMetadata;
12350 
12351     /* Do not change the order of the following list unless you know what you are
12352      * doing.
12353      * The order is laid out in such a way that parameters in the front of the table
12354      * may be used to override the parameters later in the table. Examples are:
12355      * 1. META_MODE should precede AEC/AWB/AF MODE
12356      * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12357      * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12358      * 4. Any mode should precede it's corresponding settings
12359      */
12360     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12361         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12362         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12363             rc = BAD_VALUE;
12364         }
12365         rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12366         if (rc != NO_ERROR) {
12367             LOGE("extractSceneMode failed");
12368         }
12369     }
12370 
12371     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12372         uint8_t fwk_aeMode =
12373             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12374         uint8_t aeMode;
12375         int32_t redeye;
12376 
12377         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12378             aeMode = CAM_AE_MODE_OFF;
12379         } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12380             aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
12381         } else {
12382             aeMode = CAM_AE_MODE_ON;
12383         }
12384         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12385             redeye = 1;
12386         } else {
12387             redeye = 0;
12388         }
12389 
12390         int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12391                 fwk_aeMode);
12392         if (NAME_NOT_FOUND != val) {
12393             int32_t flashMode = (int32_t)val;
12394             ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12395         }
12396 
12397         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12398         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12399             rc = BAD_VALUE;
12400         }
12401     }
12402 
12403     if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12404         uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12405         int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12406                 fwk_whiteLevel);
12407         if (NAME_NOT_FOUND != val) {
12408             uint8_t whiteLevel = (uint8_t)val;
12409             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12410                 rc = BAD_VALUE;
12411             }
12412         }
12413     }
12414 
12415     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12416         uint8_t fwk_cacMode =
12417                 frame_settings.find(
12418                         ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12419         int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12420                 fwk_cacMode);
12421         if (NAME_NOT_FOUND != val) {
12422             cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12423             bool entryAvailable = FALSE;
12424             // Check whether Frameworks set CAC mode is supported in device or not
12425             for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12426                 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12427                     entryAvailable = TRUE;
12428                     break;
12429                 }
12430             }
12431             LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12432             // If entry not found then set the device supported mode instead of frameworks mode i.e,
12433             // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
12434             // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
12435             if (entryAvailable == FALSE) {
12436                 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12437                     cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12438                 } else {
12439                     if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12440                         // High is not supported and so set the FAST as spec say's underlying
12441                         // device implementation can be the same for both modes.
12442                         cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12443                     } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12444                         // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
12445                         // in order to avoid the fps drop due to high quality
12446                         cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12447                     } else {
12448                         cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12449                     }
12450                 }
12451             }
12452             LOGD("Final cacMode is %d", cacMode);
12453             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12454                 rc = BAD_VALUE;
12455             }
12456         } else {
12457             LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12458         }
12459     }
12460 
12461     uint8_t fwk_focusMode = 0;
12462     if (m_bForceInfinityAf == 0) {
12463         if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
12464             fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
12465             int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12466                     fwk_focusMode);
12467             if (NAME_NOT_FOUND != val) {
12468                 uint8_t focusMode = (uint8_t)val;
12469                 LOGD("set focus mode %d", focusMode);
12470                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12471                          CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12472                     rc = BAD_VALUE;
12473                 }
12474             }
12475         } else {
12476             LOGE("Fatal: Missing ANDROID_CONTROL_AF_MODE");
12477         }
12478     } else {
12479         uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12480         LOGE("Focus forced to infinity %d", focusMode);
12481         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12482             rc = BAD_VALUE;
12483         }
12484     }
12485 
    // Manual focus distance (diopters): honored only when AF mode is OFF.
    // fwk_focusMode is derived earlier in this function from the request.
    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
            fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
                focalDistance)) {
            rc = BAD_VALUE;
        }
    }

    // Antibanding: map the framework enum to the HAL enum; a generic AUTO is
    // narrowed to AUTO_50HZ/AUTO_60HZ based on the m60HzZone region hint.
    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
        uint8_t fwk_antibandingMode =
                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
        int val = lookupHalName(ANTIBANDING_MODES_MAP,
                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
        if (NAME_NOT_FOUND != val) {
            uint32_t hal_antibandingMode = (uint32_t)val;
            if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
                if (m60HzZone) {
                    hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
                } else {
                    hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
                }
            }
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
                    hal_antibandingMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // AE exposure compensation, clamped to the advertised capability range.
    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
        int32_t expCompensation = frame_settings.find(
                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
        LOGD("Setting compensation:%d", expCompensation);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
                expCompensation)) {
            rc = BAD_VALUE;
        }
    }

    // AE lock: passed through unchanged.
    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
            rc = BAD_VALUE;
        }
    }
    // Target FPS range is translated by a dedicated helper.
    // NOTE(review): this assignment replaces any rc value recorded above —
    // confirm that earlier BAD_VALUE results are intentionally non-fatal here.
    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
        rc = setHalFpsRange(frame_settings, hal_metadata);
        if (rc != NO_ERROR) {
            LOGE("setHalFpsRange failed");
        }
    }

    // AWB lock: passed through unchanged.
    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
            rc = BAD_VALUE;
        }
    }
12549 
    // Special effect mode (mono, sepia, ...): framework -> HAL via lookup table;
    // unknown values are silently dropped (NAME_NOT_FOUND).
    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                fwk_effectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t effectMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Color correction mode: passed through unchanged.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
                colorCorrectMode)) {
            rc = BAD_VALUE;
        }
    }

    // Per-channel white-balance gains: copy CC_GAIN_MAX floats in order.
    // assumes the framework entry holds at least CC_GAIN_MAX values — TODO confirm.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
        cam_color_correct_gains_t colorCorrectGains;
        for (size_t i = 0; i < CC_GAIN_MAX; i++) {
            colorCorrectGains.gains[i] =
                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
                colorCorrectGains)) {
            rc = BAD_VALUE;
        }
    }

    // 3x3 color transform of rationals, read row-major from the flat
    // framework array into the HAL matrix.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
        cam_color_correct_matrix_t colorCorrectTransform;
        cam_rational_type_t transform_elem;
        size_t num = 0;
        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
              transform_elem.numerator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
              transform_elem.denominator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
              num++;
           }
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
                colorCorrectTransform)) {
            rc = BAD_VALUE;
        }
    }

    // AEC precapture trigger: forwarded only when both the trigger and its id
    // are present in the request; otherwise the IDLE/-1 defaults are unused.
    cam_trigger_t aecTrigger;
    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
    aecTrigger.trigger_id = -1;
    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
        aecTrigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
        aecTrigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
                aecTrigger)) {
            rc = BAD_VALUE;
        }
        LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
                aecTrigger.trigger, aecTrigger.trigger_id);
    }

    /*af_trigger must come with a trigger id*/
    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
        cam_trigger_t af_trigger;
        af_trigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
        af_trigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
            rc = BAD_VALUE;
        }
        LOGD("AfTrigger: %d AfTriggerID: %d",
                af_trigger.trigger, af_trigger.trigger_id);
    }

    // Demosaic mode.
    // NOTE(review): the value is read from the u8 payload into an int32_t —
    // confirm ANDROID_DEMOSAIC_MODE is byte-typed and the widening is intended.
    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
            rc = BAD_VALUE;
        }
    }
    // Edge (sharpening) mode: OFF forces sharpness 0; otherwise the capability
    // default is used unless a valid vendor QCAMERA3_SHARPNESS_STRENGTH
    // override inside [min, max] is supplied.
    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
        cam_edge_application_t edge_application;
        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];

        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
            edge_application.sharpness = 0;
        } else {
            edge_application.sharpness =
                    gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
            if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
                int32_t sharpness =
                        frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
                if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
                    sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
                    LOGD("Setting edge mode sharpness %d", sharpness);
                    edge_application.sharpness = sharpness;
                }
            }
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
            rc = BAD_VALUE;
        }
    }
12663 
    // Flash mode: android.flash.mode is honored only when the AE mode does not
    // already control the flash (AUTO_FLASH / ALWAYS_FLASH / AUTO_FLASH_REDEYE).
    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
        int32_t respectFlashMode = 1;
        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
            uint8_t fwk_aeMode =
                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
            if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
                    fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
                    fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
                respectFlashMode = 0;
                LOGH("AE Mode controls flash, ignore android.flash.mode");
            }
        }
        if (respectFlashMode) {
            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
            LOGH("flash mode after mapping %d", val);
            // To check: CAM_INTF_META_FLASH_MODE usage
            if (NAME_NOT_FOUND != val) {
                uint8_t flashMode = (uint8_t)val;
                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
                    rc = BAD_VALUE;
                }
            }
        }
    }

    // Flash firing power: passed through unchanged.
    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
            rc = BAD_VALUE;
        }
    }

    // Flash firing time (ns): passed through unchanged.
    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
                flashFiringTime)) {
            rc = BAD_VALUE;
        }
    }

    // Hot pixel correction mode: passed through unchanged.
    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
                hotPixelMode)) {
            rc = BAD_VALUE;
        }
    }

    // Lens aperture (f-number): passed through unchanged.
    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
                lensAperture)) {
            rc = BAD_VALUE;
        }
    }

    // Neutral-density filter density: passed through unchanged.
    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
                filterDensity)) {
            rc = BAD_VALUE;
        }
    }

    // Lens focal length (mm): passed through unchanged.
    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
                focalLength)) {
            rc = BAD_VALUE;
        }
    }

    // Optical image stabilization mode: passed through unchanged.
    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
        uint8_t optStabMode =
                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
                optStabMode)) {
            rc = BAD_VALUE;
        }
    }
12745 
    // Electronic (video) stabilization mode.
    // NOTE(review): unlike most entries in this function, this writes into
    // mParameters rather than the per-request hal_metadata batch — presumably
    // so the setting persists across requests; confirm this is intentional.
    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
        uint8_t videoStabMode =
                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
        LOGD("videoStabMode from APP = %d", videoStabMode);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
                videoStabMode)) {
            rc = BAD_VALUE;
        }
    }


    // Noise reduction mode: passed through unchanged.
    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
                noiseRedMode)) {
            rc = BAD_VALUE;
        }
    }

    // Effective exposure factor for reprocess requests: passed through unchanged.
    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
        float reprocessEffectiveExposureFactor =
            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
                reprocessEffectiveExposureFactor)) {
            rc = BAD_VALUE;
        }
    }

    // Scaler crop region (digital zoom): framework supplies [left, top, width,
    // height] in active-array coordinates; remap to sensor-output coordinates.
    // scalerCropSet is consumed further below to clamp the AE/AF ROIs.
    cam_crop_region_t scalerCropRegion;
    bool scalerCropSet = false;
    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
                scalerCropRegion.width, scalerCropRegion.height);

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
                scalerCropRegion)) {
            rc = BAD_VALUE;
        }
        scalerCropSet = true;
    }

    // Manual sensor exposure time (ns): passed through unchanged.
    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
        int64_t sensorExpTime =
                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
        LOGD("setting sensorExpTime %lld", sensorExpTime);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
                sensorExpTime)) {
            rc = BAD_VALUE;
        }
    }

    // Frame duration (ns), clamped to [minFrameDuration (computed earlier in
    // this function), capability max_frame_duration].
    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
        int64_t sensorFrameDuration =
                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
        LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
                sensorFrameDuration)) {
            rc = BAD_VALUE;
        }
    }

    // Manual sensor sensitivity (ISO), clamped to the capability range.
    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
        LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
                sensorSensitivity)) {
            rc = BAD_VALUE;
        }
    }
12828 
#ifndef USE_HAL_3_3
    // Post-RAW (ISP digital) sensitivity boost, clamped to the capability
    // range. The tag only exists on HAL versions newer than 3.3.
    if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
        int32_t ispSensitivity =
            frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
        if (ispSensitivity <
            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
                ispSensitivity =
                    gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
                LOGD("clamp ispSensitivity to %d", ispSensitivity);
        }
        if (ispSensitivity >
            gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
                ispSensitivity =
                    gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
                LOGD("clamp ispSensitivity to %d", ispSensitivity);
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
                ispSensitivity)) {
            rc = BAD_VALUE;
        }
    }
#endif

    // Lens shading (vignetting) correction mode: passed through unchanged.
    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
            rc = BAD_VALUE;
        }
    }

    // Face detection mode: framework -> HAL via lookup table; unknown values
    // are silently dropped.
    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
        uint8_t fwk_facedetectMode =
                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];

        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
                fwk_facedetectMode);

        if (NAME_NOT_FOUND != val) {
            uint8_t facedetectMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
                    facedetectMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Vendor histogram statistics mode: passed through unchanged.
    if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
        uint8_t histogramMode =
                frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
                histogramMode)) {
            rc = BAD_VALUE;
        }
    }

    // Sharpness map statistics mode: passed through unchanged.
    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
        uint8_t sharpnessMapMode =
                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
                sharpnessMapMode)) {
            rc = BAD_VALUE;
        }
    }

    // Tonemap mode: passed through unchanged.
    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
        uint8_t tonemapMode =
                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
            rc = BAD_VALUE;
        }
    }
    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
    /*All tonemap channels will have the same number of points*/
    // Each channel is a flat list of (in, out) float pairs; the point count is
    // derived from the GREEN entry only and clamped to CAM_MAX_TONEMAP_CURVE_SIZE.
    // NOTE(review): the BLUE/RED entries are read with the GREEN count — the
    // equal-length assumption follows the camera metadata contract but is not
    // validated here; a shorter blue/red entry would be over-read. Confirm the
    // framework guarantees equal counts before relying on this.
    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
        cam_rgb_tonemap_curves tonemapCurves;
        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     tonemapCurves.tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }

        /* ch0 = G*/
        size_t point = 0;
        cam_tonemap_curve_t tonemapCurveGreen;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveGreen.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[0] = tonemapCurveGreen;

        /* ch 1 = B */
        point = 0;
        cam_tonemap_curve_t tonemapCurveBlue;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveBlue.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[1] = tonemapCurveBlue;

        /* ch 2 = R */
        point = 0;
        cam_tonemap_curve_t tonemapCurveRed;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveRed.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[2] = tonemapCurveRed;

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
                tonemapCurves)) {
            rc = BAD_VALUE;
        }
    }
12955 
    // Capture intent (preview / still / video / ...): passed through unchanged.
    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
                captureIntent)) {
            rc = BAD_VALUE;
        }
    }

    // Black level lock: passed through unchanged.
    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
                blackLevelLock)) {
            rc = BAD_VALUE;
        }
    }

    // Lens shading map output mode (for RAW reprocessing): passed through unchanged.
    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
        uint8_t lensShadingMapMode =
                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
                lensShadingMapMode)) {
            rc = BAD_VALUE;
        }
    }

    // AE metering region: convert, remap active-array -> sensor coordinates,
    // then clamp against the scaler crop (if one was set above). When
    // resetIfNeededROI returns false the ROI is dropped for this request.
    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
                roi.rect.height);

        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
            rc = BAD_VALUE;
        }
    }

    // AF metering region: same treatment as the AE region above.
    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
                roi.rect.height);

        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
            rc = BAD_VALUE;
        }
    }

    // CDS for non-HFR non-video mode
    if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
            !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
            LOGE("Invalid CDS mode %d!", *fwk_cds);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Video HDR
    // Mode comes from the vendor tag, or is forced ON when m_bVideoHdrEnabled.
    // NOTE(review): vhdr (a mode enum) is compared against curr_hdr_state (a
    // 0/1 feature flag) for the profile log only — confirm the intended
    // semantics. Also note rc is overwritten by setVideoHdrMode below,
    // discarding any BAD_VALUE recorded earlier; confirm that is acceptable.
    cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
    if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
        vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
    }
    if (m_bVideoHdrEnabled)
        vhdr = CAM_VIDEO_HDR_MODE_ON;

    int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);

    if(vhdr != curr_hdr_state)
        LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);

    rc = setVideoHdrMode(mParameters, vhdr);
    if (rc != NO_ERROR) {
        LOGE("setVideoHDR is failed");
    }
13046 
    //IR
    // Vendor IR mode: validated against [0, CAM_IR_MODE_MAX); invalid values
    // are logged and dropped. isIRon is a 0/1 summary used only for the
    // profile log when the state changes.
    if(frame_settings.exists(QCAMERA3_IR_MODE)) {
        cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
                frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
        uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
        uint8_t isIRon = 0;

        (fwk_ir >0) ? (isIRon = 1) : (isIRon = 0) ;
        if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
            LOGE("Invalid IR mode %d!", fwk_ir);
        } else {
            if(isIRon != curr_ir_state )
               LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);

            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_META_IR_MODE, fwk_ir)) {
                rc = BAD_VALUE;
            }
        }
    }

    //Binning Correction Mode
    // Vendor binning-correction mode, range-checked before forwarding.
    if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
        cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
                frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
        if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
                || (0 > fwk_binning_correction)) {
            LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Vendor AEC convergence speed: only non-negative values are forwarded.
    if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
        float aec_speed;
        aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
        LOGD("AEC Speed :%f", aec_speed);
        if ( aec_speed < 0 ) {
            LOGE("Invalid AEC mode %f!", aec_speed);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
                    aec_speed)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Vendor AWB convergence speed: only non-negative values are forwarded.
    if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
        float awb_speed;
        awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
        LOGD("AWB Speed :%f", awb_speed);
        if ( awb_speed < 0 ) {
            LOGE("Invalid AWB mode %f!", awb_speed);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
                    awb_speed)) {
                rc = BAD_VALUE;
            }
        }
    }

    // TNR
    // Temporal noise reduction: requires both the enable flag and the process
    // type. NOTE(review): like video stabilization above, this writes to
    // mParameters rather than hal_metadata — confirm intentional.
    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
        uint8_t b_TnrRequested = 0;
        uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
        cam_denoise_param_t tnr;
        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
        tnr.process_plates =
            (cam_denoise_process_type_t)frame_settings.find(
            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
        b_TnrRequested = tnr.denoise_enable;

        if(b_TnrRequested != curr_tnr_state)
           LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
            rc = BAD_VALUE;
        }
    }

    // Vendor exposure metering mode: forwarded unvalidated to the AEC algo type.
    if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
        int32_t* exposure_metering_mode =
                frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
                *exposure_metering_mode)) {
            rc = BAD_VALUE;
        }
    }
13139 
    // Sensor test pattern: map the framework mode, and for SOLID_COLOR also
    // translate the per-channel values. The framework data order is
    // [R, Geven, Godd, B]; the Gr/Gb assignment below depends on the sensor's
    // color filter arrangement.
    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
        int32_t fwk_testPatternMode =
                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);

        if (NAME_NOT_FOUND != testPatternMode) {
            cam_test_pattern_data_t testPatternData;
            memset(&testPatternData, 0, sizeof(testPatternData));
            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
                int32_t *fwk_testPatternData =
                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
                testPatternData.r = fwk_testPatternData[0];
                testPatternData.b = fwk_testPatternData[3];
                switch (gCamCapability[mCameraId]->color_arrangement) {
                    case CAM_FILTER_ARRANGEMENT_RGGB:
                    case CAM_FILTER_ARRANGEMENT_GRBG:
                        testPatternData.gr = fwk_testPatternData[1];
                        testPatternData.gb = fwk_testPatternData[2];
                        break;
                    case CAM_FILTER_ARRANGEMENT_GBRG:
                    case CAM_FILTER_ARRANGEMENT_BGGR:
                        testPatternData.gr = fwk_testPatternData[2];
                        testPatternData.gb = fwk_testPatternData[1];
                        break;
                    default:
                        LOGE("color arrangement %d is not supported",
                                gCamCapability[mCameraId]->color_arrangement);
                        break;
                }
            }
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
                    testPatternData)) {
                rc = BAD_VALUE;
            }
        } else {
            LOGE("Invalid framework sensor test pattern mode %d",
                    fwk_testPatternMode);
        }
    }

    // JPEG EXIF GPS coordinates: copied as a double array; rc reflects a
    // short copy (count mismatch) from the batch macro.
    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
        size_t count = 0;
        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
                gps_coords.data.d, gps_coords.count, count);
        if (gps_coords.count != count) {
            rc = BAD_VALUE;
        }
    }

    // JPEG EXIF GPS processing method: copied into a fixed, NUL-padded buffer
    // (strlcpy truncates safely); the full GPS_PROCESSING_METHOD_SIZE bytes
    // are always sent regardless of the string's actual length.
    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
        size_t count = 0;
        const char *gps_methods_src = (const char *)
                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
        memset(gps_methods, '\0', sizeof(gps_methods));
        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
        if (GPS_PROCESSING_METHOD_SIZE != count) {
            rc = BAD_VALUE;
        }
    }

    // JPEG EXIF GPS timestamp: passed through unchanged.
    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
                gps_timestamp)) {
            rc = BAD_VALUE;
        }
    }

    // JPEG orientation -> HAL rotation enum.
    // NOTE(review): rotation_info.rotation is left UNINITIALIZED when the
    // requested orientation is not exactly 0/90/180/270 — consider defaulting
    // it (e.g. ROTATE_0) before the if-chain; confirm upstream guarantees a
    // right-angle value.
    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
        cam_rotation_info_t rotation_info;
        if (orientation == 0) {
           rotation_info.rotation = ROTATE_0;
        } else if (orientation == 90) {
           rotation_info.rotation = ROTATE_90;
        } else if (orientation == 180) {
           rotation_info.rotation = ROTATE_180;
        } else if (orientation == 270) {
           rotation_info.rotation = ROTATE_270;
        }
        rotation_info.device_rotation = ROTATE_0;
        rotation_info.streamId = snapshotStreamId;
13229         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
13230         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
13231             rc = BAD_VALUE;
13232         }
13233     }
13234 
13235     if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
13236         uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
13237         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
13238             rc = BAD_VALUE;
13239         }
13240     }
13241 
13242     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13243         uint32_t thumb_quality = (uint32_t)
13244                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13245         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13246                 thumb_quality)) {
13247             rc = BAD_VALUE;
13248         }
13249     }
13250 
13251     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13252         cam_dimension_t dim;
13253         dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13254         dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13255         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13256             rc = BAD_VALUE;
13257         }
13258     }
13259 
13260     // Internal metadata
13261     if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13262         size_t count = 0;
13263         camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13264         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13265                 privatedata.data.i32, privatedata.count, count);
13266         if (privatedata.count != count) {
13267             rc = BAD_VALUE;
13268         }
13269     }
13270 
13271     // ISO/Exposure Priority
13272     if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13273         frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13274         cam_priority_mode_t mode =
13275                 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13276         if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13277             cam_intf_parm_manual_3a_t use_iso_exp_pty;
13278             use_iso_exp_pty.previewOnly = FALSE;
13279             uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13280             use_iso_exp_pty.value = *ptr;
13281 
13282             if(CAM_ISO_PRIORITY == mode) {
13283                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13284                         use_iso_exp_pty)) {
13285                     rc = BAD_VALUE;
13286                 }
13287             }
13288             else {
13289                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13290                         use_iso_exp_pty)) {
13291                     rc = BAD_VALUE;
13292                 }
13293             }
13294 
13295             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13296                     rc = BAD_VALUE;
13297             }
13298         }
13299     } else {
13300         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13301             rc = BAD_VALUE;
13302         }
13303     }
13304 
13305     // Saturation
13306     if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13307         int32_t* use_saturation =
13308                 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13309         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13310             rc = BAD_VALUE;
13311         }
13312     }
13313 
13314     // EV step
13315     if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13316             gCamCapability[mCameraId]->exp_compensation_step)) {
13317         rc = BAD_VALUE;
13318     }
13319 
13320     // CDS info
13321     if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13322         cam_cds_data_t *cdsData = (cam_cds_data_t *)
13323                 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13324 
13325         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13326                 CAM_INTF_META_CDS_DATA, *cdsData)) {
13327             rc = BAD_VALUE;
13328         }
13329     }
13330 
13331     // Hybrid AE
13332     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13333         uint8_t *hybrid_ae = (uint8_t *)
13334                 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
13335         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13336             rc = BAD_VALUE;
13337         }
13338     }
13339 
13340     // Histogram
13341     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13342         uint8_t histogramMode =
13343                 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13344         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13345                 histogramMode)) {
13346             rc = BAD_VALUE;
13347         }
13348     }
13349 
13350     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13351         int32_t histogramBins =
13352                  frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13353         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13354                 histogramBins)) {
13355             rc = BAD_VALUE;
13356         }
13357     }
13358 
13359     // Tracking AF
13360     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13361         uint8_t trackingAfTrigger =
13362                 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13363         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13364                 trackingAfTrigger)) {
13365             rc = BAD_VALUE;
13366         }
13367     }
13368 
13369     // Makernote
13370     camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13371     if (entry.count != 0) {
13372         if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13373             cam_makernote_t makernote;
13374             makernote.length = entry.count;
13375             memcpy(makernote.data, entry.data.u8, makernote.length);
13376             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13377                 rc = BAD_VALUE;
13378             }
13379         } else {
13380             ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
13381                     MAX_MAKERNOTE_LENGTH);
13382             rc = BAD_VALUE;
13383         }
13384     }
13385 
13386     return rc;
13387 }
13388 
13389 /*===========================================================================
13390  * FUNCTION   : captureResultCb
13391  *
13392  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13393  *
13394  * PARAMETERS :
13395  *   @frame  : frame information from mm-camera-interface
13396  *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13397  *   @userdata: userdata
13398  *
13399  * RETURN     : NONE
13400  *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer,void * userdata)13401 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13402                 camera3_stream_buffer_t *buffer,
13403                 uint32_t frame_number, bool isInputBuffer, void *userdata)
13404 {
13405     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13406     if (hw == NULL) {
13407         LOGE("Invalid hw %p", hw);
13408         return;
13409     }
13410 
13411     hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13412     return;
13413 }
13414 
13415 /*===========================================================================
13416  * FUNCTION   : setBufferErrorStatus
13417  *
13418  * DESCRIPTION: Callback handler for channels to report any buffer errors
13419  *
13420  * PARAMETERS :
13421  *   @ch     : Channel on which buffer error is reported from
13422  *   @frame_number  : frame number on which buffer error is reported on
13423  *   @buffer_status : buffer error status
13424  *   @userdata: userdata
13425  *
13426  * RETURN     : NONE
13427  *==========================================================================*/
setBufferErrorStatus(QCamera3Channel * ch,uint32_t frame_number,camera3_buffer_status_t err,void * userdata)13428 void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13429         uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13430 {
13431     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13432     if (hw == NULL) {
13433         LOGE("Invalid hw %p", hw);
13434         return;
13435     }
13436 
13437     hw->setBufferErrorStatus(ch, frame_number, err);
13438     return;
13439 }
13440 
setBufferErrorStatus(QCamera3Channel * ch,uint32_t frameNumber,camera3_buffer_status_t err)13441 void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13442         uint32_t frameNumber, camera3_buffer_status_t err)
13443 {
13444     LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13445     pthread_mutex_lock(&mMutex);
13446 
13447     for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13448         if (req.frame_number != frameNumber)
13449             continue;
13450         for (auto& k : req.mPendingBufferList) {
13451             if(k.stream->priv == ch) {
13452                 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13453             }
13454         }
13455     }
13456 
13457     pthread_mutex_unlock(&mMutex);
13458     return;
13459 }
13460 /*===========================================================================
13461  * FUNCTION   : initialize
13462  *
13463  * DESCRIPTION: Pass framework callback pointers to HAL
13464  *
13465  * PARAMETERS :
13466  *
13467  *
13468  * RETURN     : Success : 0
13469  *              Failure: -ENODEV
13470  *==========================================================================*/
13471 
initialize(const struct camera3_device * device,const camera3_callback_ops_t * callback_ops)13472 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13473                                   const camera3_callback_ops_t *callback_ops)
13474 {
13475     LOGD("E");
13476     QCamera3HardwareInterface *hw =
13477         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13478     if (!hw) {
13479         LOGE("NULL camera device");
13480         return -ENODEV;
13481     }
13482 
13483     int rc = hw->initialize(callback_ops);
13484     LOGD("X");
13485     return rc;
13486 }
13487 
13488 /*===========================================================================
13489  * FUNCTION   : configure_streams
13490  *
13491  * DESCRIPTION:
13492  *
13493  * PARAMETERS :
13494  *
13495  *
13496  * RETURN     : Success: 0
13497  *              Failure: -EINVAL (if stream configuration is invalid)
13498  *                       -ENODEV (fatal error)
13499  *==========================================================================*/
13500 
configure_streams(const struct camera3_device * device,camera3_stream_configuration_t * stream_list)13501 int QCamera3HardwareInterface::configure_streams(
13502         const struct camera3_device *device,
13503         camera3_stream_configuration_t *stream_list)
13504 {
13505     LOGD("E");
13506     QCamera3HardwareInterface *hw =
13507         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13508     if (!hw) {
13509         LOGE("NULL camera device");
13510         return -ENODEV;
13511     }
13512     int rc = hw->configureStreams(stream_list);
13513     LOGD("X");
13514     return rc;
13515 }
13516 
13517 /*===========================================================================
13518  * FUNCTION   : construct_default_request_settings
13519  *
13520  * DESCRIPTION: Configure a settings buffer to meet the required use case
13521  *
13522  * PARAMETERS :
13523  *
13524  *
13525  * RETURN     : Success: Return valid metadata
13526  *              Failure: Return NULL
13527  *==========================================================================*/
13528 const camera_metadata_t* QCamera3HardwareInterface::
construct_default_request_settings(const struct camera3_device * device,int type)13529     construct_default_request_settings(const struct camera3_device *device,
13530                                         int type)
13531 {
13532 
13533     LOGD("E");
13534     camera_metadata_t* fwk_metadata = NULL;
13535     QCamera3HardwareInterface *hw =
13536         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13537     if (!hw) {
13538         LOGE("NULL camera device");
13539         return NULL;
13540     }
13541 
13542     fwk_metadata = hw->translateCapabilityToMetadata(type);
13543 
13544     LOGD("X");
13545     return fwk_metadata;
13546 }
13547 
13548 /*===========================================================================
13549  * FUNCTION   : process_capture_request
13550  *
13551  * DESCRIPTION:
13552  *
13553  * PARAMETERS :
13554  *
13555  *
13556  * RETURN     :
13557  *==========================================================================*/
process_capture_request(const struct camera3_device * device,camera3_capture_request_t * request)13558 int QCamera3HardwareInterface::process_capture_request(
13559                     const struct camera3_device *device,
13560                     camera3_capture_request_t *request)
13561 {
13562     LOGD("E");
13563     CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
13564     QCamera3HardwareInterface *hw =
13565         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13566     if (!hw) {
13567         LOGE("NULL camera device");
13568         return -EINVAL;
13569     }
13570 
13571     int rc = hw->orchestrateRequest(request);
13572     LOGD("X");
13573     return rc;
13574 }
13575 
13576 /*===========================================================================
13577  * FUNCTION   : dump
13578  *
13579  * DESCRIPTION:
13580  *
13581  * PARAMETERS :
13582  *
13583  *
13584  * RETURN     :
13585  *==========================================================================*/
13586 
dump(const struct camera3_device * device,int fd)13587 void QCamera3HardwareInterface::dump(
13588                 const struct camera3_device *device, int fd)
13589 {
13590     /* Log level property is read when "adb shell dumpsys media.camera" is
13591        called so that the log level can be controlled without restarting
13592        the media server */
13593     getLogLevel();
13594 
13595     LOGD("E");
13596     QCamera3HardwareInterface *hw =
13597         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13598     if (!hw) {
13599         LOGE("NULL camera device");
13600         return;
13601     }
13602 
13603     hw->dump(fd);
13604     LOGD("X");
13605     return;
13606 }
13607 
13608 /*===========================================================================
13609  * FUNCTION   : flush
13610  *
13611  * DESCRIPTION:
13612  *
13613  * PARAMETERS :
13614  *
13615  *
13616  * RETURN     :
13617  *==========================================================================*/
13618 
flush(const struct camera3_device * device)13619 int QCamera3HardwareInterface::flush(
13620                 const struct camera3_device *device)
13621 {
13622     int rc;
13623     LOGD("E");
13624     QCamera3HardwareInterface *hw =
13625         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13626     if (!hw) {
13627         LOGE("NULL camera device");
13628         return -EINVAL;
13629     }
13630 
13631     pthread_mutex_lock(&hw->mMutex);
13632     // Validate current state
13633     switch (hw->mState) {
13634         case STARTED:
13635             /* valid state */
13636             break;
13637 
13638         case ERROR:
13639             pthread_mutex_unlock(&hw->mMutex);
13640             hw->handleCameraDeviceError();
13641             return -ENODEV;
13642 
13643         default:
13644             LOGI("Flush returned during state %d", hw->mState);
13645             pthread_mutex_unlock(&hw->mMutex);
13646             return 0;
13647     }
13648     pthread_mutex_unlock(&hw->mMutex);
13649 
13650     rc = hw->flush(true /* restart channels */ );
13651     LOGD("X");
13652     return rc;
13653 }
13654 
13655 /*===========================================================================
13656  * FUNCTION   : close_camera_device
13657  *
13658  * DESCRIPTION:
13659  *
13660  * PARAMETERS :
13661  *
13662  *
13663  * RETURN     :
13664  *==========================================================================*/
close_camera_device(struct hw_device_t * device)13665 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13666 {
13667     int ret = NO_ERROR;
13668     QCamera3HardwareInterface *hw =
13669         reinterpret_cast<QCamera3HardwareInterface *>(
13670             reinterpret_cast<camera3_device_t *>(device)->priv);
13671     if (!hw) {
13672         LOGE("NULL camera device");
13673         return BAD_VALUE;
13674     }
13675 
13676     LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13677     delete hw;
13678     LOGI("[KPI Perf]: X");
13679     CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
13680     return ret;
13681 }
13682 
13683 /*===========================================================================
13684  * FUNCTION   : getWaveletDenoiseProcessPlate
13685  *
13686  * DESCRIPTION: query wavelet denoise process plate
13687  *
13688  * PARAMETERS : None
13689  *
13690  * RETURN     : WNR prcocess plate value
13691  *==========================================================================*/
getWaveletDenoiseProcessPlate()13692 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13693 {
13694     char prop[PROPERTY_VALUE_MAX];
13695     memset(prop, 0, sizeof(prop));
13696     property_get("persist.denoise.process.plates", prop, "0");
13697     int processPlate = atoi(prop);
13698     switch(processPlate) {
13699     case 0:
13700         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13701     case 1:
13702         return CAM_WAVELET_DENOISE_CBCR_ONLY;
13703     case 2:
13704         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13705     case 3:
13706         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13707     default:
13708         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13709     }
13710 }
13711 
13712 
13713 /*===========================================================================
13714  * FUNCTION   : getTemporalDenoiseProcessPlate
13715  *
13716  * DESCRIPTION: query temporal denoise process plate
13717  *
13718  * PARAMETERS : None
13719  *
13720  * RETURN     : TNR prcocess plate value
13721  *==========================================================================*/
getTemporalDenoiseProcessPlate()13722 cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13723 {
13724     char prop[PROPERTY_VALUE_MAX];
13725     memset(prop, 0, sizeof(prop));
13726     property_get("persist.tnr.process.plates", prop, "0");
13727     int processPlate = atoi(prop);
13728     switch(processPlate) {
13729     case 0:
13730         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13731     case 1:
13732         return CAM_WAVELET_DENOISE_CBCR_ONLY;
13733     case 2:
13734         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13735     case 3:
13736         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13737     default:
13738         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13739     }
13740 }
13741 
13742 
13743 /*===========================================================================
13744  * FUNCTION   : extractSceneMode
13745  *
13746  * DESCRIPTION: Extract scene mode from frameworks set metadata
13747  *
13748  * PARAMETERS :
13749  *      @frame_settings: CameraMetadata reference
13750  *      @metaMode: ANDROID_CONTORL_MODE
13751  *      @hal_metadata: hal metadata structure
13752  *
13753  * RETURN     : None
13754  *==========================================================================*/
extractSceneMode(const CameraMetadata & frame_settings,uint8_t metaMode,metadata_buffer_t * hal_metadata)13755 int32_t QCamera3HardwareInterface::extractSceneMode(
13756         const CameraMetadata &frame_settings, uint8_t metaMode,
13757         metadata_buffer_t *hal_metadata)
13758 {
13759     int32_t rc = NO_ERROR;
13760     uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13761 
13762     if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13763         LOGD("Ignoring control mode OFF_KEEP_STATE");
13764         return NO_ERROR;
13765     }
13766 
13767     if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13768         camera_metadata_ro_entry entry =
13769                 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13770         if (0 == entry.count)
13771             return rc;
13772 
13773         uint8_t fwk_sceneMode = entry.data.u8[0];
13774 
13775         int val = lookupHalName(SCENE_MODES_MAP,
13776                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13777                 fwk_sceneMode);
13778         if (NAME_NOT_FOUND != val) {
13779             sceneMode = (uint8_t)val;
13780             LOGD("sceneMode: %d", sceneMode);
13781         }
13782     }
13783 
13784     if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13785         rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13786     }
13787 
13788     if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13789         if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
13790             cam_hdr_param_t hdr_params;
13791             hdr_params.hdr_enable = 1;
13792             hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13793             hdr_params.hdr_need_1x = false;
13794             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13795                     CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13796                 rc = BAD_VALUE;
13797             }
13798         }
13799 
13800         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13801                 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13802             rc = BAD_VALUE;
13803         }
13804     }
13805 
13806     if (mForceHdrSnapshot) {
13807         cam_hdr_param_t hdr_params;
13808         hdr_params.hdr_enable = 1;
13809         hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13810         hdr_params.hdr_need_1x = false;
13811         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13812                 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13813             rc = BAD_VALUE;
13814         }
13815     }
13816 
13817     return rc;
13818 }
13819 
13820 /*===========================================================================
13821  * FUNCTION   : setVideoHdrMode
13822  *
13823  * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13824  *
13825  * PARAMETERS :
13826  *      @hal_metadata: hal metadata structure
13827  *      @metaMode: QCAMERA3_VIDEO_HDR_MODE
13828  *
13829  * RETURN     : None
13830  *==========================================================================*/
setVideoHdrMode(metadata_buffer_t * hal_metadata,cam_video_hdr_mode_t vhdr)13831 int32_t QCamera3HardwareInterface::setVideoHdrMode(
13832         metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13833 {
13834     if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13835         return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13836     }
13837 
13838     LOGE("Invalid Video HDR mode %d!", vhdr);
13839     return BAD_VALUE;
13840 }
13841 
13842 /*===========================================================================
13843  * FUNCTION   : setSensorHDR
13844  *
13845  * DESCRIPTION: Enable/disable sensor HDR.
13846  *
13847  * PARAMETERS :
13848  *      @hal_metadata: hal metadata structure
13849  *      @enable: boolean whether to enable/disable sensor HDR
13850  *
13851  * RETURN     : None
13852  *==========================================================================*/
setSensorHDR(metadata_buffer_t * hal_metadata,bool enable,bool isVideoHdrEnable)13853 int32_t QCamera3HardwareInterface::setSensorHDR(
13854         metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13855 {
13856     int32_t rc = NO_ERROR;
13857     cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13858 
13859     if (enable) {
13860         char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13861         memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13862         #ifdef _LE_CAMERA_
13863         //Default to staggered HDR for IOT
13864         property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13865         #else
13866         property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13867         #endif
13868         sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13869     }
13870 
13871     bool isSupported = false;
13872     switch (sensor_hdr) {
13873         case CAM_SENSOR_HDR_IN_SENSOR:
13874             if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13875                     CAM_QCOM_FEATURE_SENSOR_HDR) {
13876                 isSupported = true;
13877                 LOGD("Setting HDR mode In Sensor");
13878             }
13879             break;
13880         case CAM_SENSOR_HDR_ZIGZAG:
13881             if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13882                     CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13883                 isSupported = true;
13884                 LOGD("Setting HDR mode Zigzag");
13885             }
13886             break;
13887         case CAM_SENSOR_HDR_STAGGERED:
13888             if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13889                     CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13890                 isSupported = true;
13891                 LOGD("Setting HDR mode Staggered");
13892             }
13893             break;
13894         case CAM_SENSOR_HDR_OFF:
13895             isSupported = true;
13896             LOGD("Turning off sensor HDR");
13897             break;
13898         default:
13899             LOGE("HDR mode %d not supported", sensor_hdr);
13900             rc = BAD_VALUE;
13901             break;
13902     }
13903 
13904     if(isSupported) {
13905         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13906                 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13907             rc = BAD_VALUE;
13908         } else {
13909             if(!isVideoHdrEnable)
13910                 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
13911         }
13912     }
13913     return rc;
13914 }
13915 
13916 /*===========================================================================
13917  * FUNCTION   : needRotationReprocess
13918  *
13919  * DESCRIPTION: if rotation needs to be done by reprocess in pp
13920  *
13921  * PARAMETERS : none
13922  *
13923  * RETURN     : true: needed
13924  *              false: no need
13925  *==========================================================================*/
needRotationReprocess()13926 bool QCamera3HardwareInterface::needRotationReprocess()
13927 {
13928     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13929         // current rotation is not zero, and pp has the capability to process rotation
13930         LOGH("need do reprocess for rotation");
13931         return true;
13932     }
13933 
13934     return false;
13935 }
13936 
13937 /*===========================================================================
13938  * FUNCTION   : needReprocess
13939  *
13940  * DESCRIPTION: if reprocess in needed
13941  *
13942  * PARAMETERS : none
13943  *
13944  * RETURN     : true: needed
13945  *              false: no need
13946  *==========================================================================*/
needReprocess(cam_feature_mask_t postprocess_mask)13947 bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13948 {
13949     if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13950         // TODO: add for ZSL HDR later
13951         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13952         if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13953             LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13954             return true;
13955         } else {
13956             LOGH("already post processed frame");
13957             return false;
13958         }
13959     }
13960     return needRotationReprocess();
13961 }
13962 
13963 /*===========================================================================
13964  * FUNCTION   : needJpegExifRotation
13965  *
13966  * DESCRIPTION: if rotation from jpeg is needed
13967  *
13968  * PARAMETERS : none
13969  *
13970  * RETURN     : true: needed
13971  *              false: no need
13972  *==========================================================================*/
needJpegExifRotation()13973 bool QCamera3HardwareInterface::needJpegExifRotation()
13974 {
13975     /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
13976     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13977        LOGD("Need use Jpeg EXIF Rotation");
13978        return true;
13979     }
13980     return false;
13981 }
13982 
/*===========================================================================
 * FUNCTION   : addOfflineReprocChannel
 *
 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
 *              coming from input channel
 *
 * PARAMETERS :
 *   @config  : reprocess configuration
 *   @inputChHandle : pointer to the input (source) channel
 *
 *
 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    // Create the channel with an empty feature mask; the actual pp feature
    // config is supplied below via addReprocStreamsFromSource().
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
            config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    if (NULL == pChannel) {
        LOGE("no mem for reprocess channel");
        return NULL;
    }

    // Channel must be initialized before streams can be added to it.
    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        LOGE("init reprocess channel failed, ret = %d", rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config: start from the HAL3 superset and adjust per
    // hardware capability and the caller's reprocess configuration.
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
            & CAM_QCOM_FEATURE_DSDN) {
        //Use CPP CDS incase h/w supports it.
        // DSDN replaces CDS when available, so drop CDS from the mask.
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
    }
    // Strip rotation from the mask when the pp block cannot rotate
    // (cf. needJpegExifRotation(), which enables EXIF rotation instead).
    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
    }

    // Propagate HDR parameters requested by the reprocess configuration.
    if (config.hdr_param.hdr_enable) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param = config.hdr_param;
    }

    // Debug override: force multi-frame HDR snapshots regardless of config.
    if (mForceHdrSnapshot) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param.hdr_enable = 1;
        pp_config.hdr_param.hdr_need_1x = 0;
        pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
    }

    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
14055 
14056 /*===========================================================================
14057  * FUNCTION   : getMobicatMask
14058  *
14059  * DESCRIPTION: returns mobicat mask
14060  *
14061  * PARAMETERS : none
14062  *
14063  * RETURN     : mobicat mask
14064  *
14065  *==========================================================================*/
getMobicatMask()14066 uint8_t QCamera3HardwareInterface::getMobicatMask()
14067 {
14068     return m_MobicatMask;
14069 }
14070 
14071 /*===========================================================================
14072  * FUNCTION   : setMobicat
14073  *
14074  * DESCRIPTION: set Mobicat on/off.
14075  *
14076  * PARAMETERS :
14077  *   @params  : none
14078  *
14079  * RETURN     : int32_t type of status
14080  *              NO_ERROR  -- success
14081  *              none-zero failure code
14082  *==========================================================================*/
setMobicat()14083 int32_t QCamera3HardwareInterface::setMobicat()
14084 {
14085     int32_t ret = NO_ERROR;
14086 
14087     if (m_MobicatMask) {
14088         tune_cmd_t tune_cmd;
14089         tune_cmd.type = SET_RELOAD_CHROMATIX;
14090         tune_cmd.module = MODULE_ALL;
14091         tune_cmd.value = TRUE;
14092         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14093                 CAM_INTF_PARM_SET_VFE_COMMAND,
14094                 tune_cmd);
14095 
14096         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14097                 CAM_INTF_PARM_SET_PP_COMMAND,
14098                 tune_cmd);
14099     }
14100 
14101     return ret;
14102 }
14103 
14104 /*===========================================================================
14105 * FUNCTION   : getLogLevel
14106 *
14107 * DESCRIPTION: Reads the log level property into a variable
14108 *
14109 * PARAMETERS :
14110 *   None
14111 *
14112 * RETURN     :
14113 *   None
14114 *==========================================================================*/
getLogLevel()14115 void QCamera3HardwareInterface::getLogLevel()
14116 {
14117     char prop[PROPERTY_VALUE_MAX];
14118     uint32_t globalLogLevel = 0;
14119 
14120     property_get("persist.camera.hal.debug", prop, "0");
14121     int val = atoi(prop);
14122     if (0 <= val) {
14123         gCamHal3LogLevel = (uint32_t)val;
14124     }
14125 
14126     property_get("persist.camera.kpi.debug", prop, "0");
14127     gKpiDebugLevel = atoi(prop);
14128 
14129     property_get("persist.camera.global.debug", prop, "0");
14130     val = atoi(prop);
14131     if (0 <= val) {
14132         globalLogLevel = (uint32_t)val;
14133     }
14134 
14135     /* Highest log level among hal.logs and global.logs is selected */
14136     if (gCamHal3LogLevel < globalLogLevel)
14137         gCamHal3LogLevel = globalLogLevel;
14138 
14139     return;
14140 }
14141 
14142 /*===========================================================================
14143  * FUNCTION   : validateStreamRotations
14144  *
14145  * DESCRIPTION: Check if the rotations requested are supported
14146  *
14147  * PARAMETERS :
14148  *   @stream_list : streams to be configured
14149  *
14150  * RETURN     : NO_ERROR on success
14151  *              -EINVAL on failure
14152  *
14153  *==========================================================================*/
validateStreamRotations(camera3_stream_configuration_t * streamList)14154 int QCamera3HardwareInterface::validateStreamRotations(
14155         camera3_stream_configuration_t *streamList)
14156 {
14157     int rc = NO_ERROR;
14158 
14159     /*
14160     * Loop through all streams requested in configuration
14161     * Check if unsupported rotations have been requested on any of them
14162     */
14163     for (size_t j = 0; j < streamList->num_streams; j++){
14164         camera3_stream_t *newStream = streamList->streams[j];
14165 
14166         switch(newStream->rotation) {
14167             case CAMERA3_STREAM_ROTATION_0:
14168             case CAMERA3_STREAM_ROTATION_90:
14169             case CAMERA3_STREAM_ROTATION_180:
14170             case CAMERA3_STREAM_ROTATION_270:
14171                 //Expected values
14172                 break;
14173             default:
14174                 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
14175                         "type:%d and stream format:%d", __func__,
14176                         newStream->rotation, newStream->stream_type,
14177                         newStream->format);
14178                 return -EINVAL;
14179         }
14180 
14181         bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
14182         bool isImplDef = (newStream->format ==
14183                 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
14184         bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
14185                 isImplDef);
14186 
14187         if (isRotated && (!isImplDef || isZsl)) {
14188             LOGE("Error: Unsupported rotation of %d requested for stream"
14189                     "type:%d and stream format:%d",
14190                     newStream->rotation, newStream->stream_type,
14191                     newStream->format);
14192             rc = -EINVAL;
14193             break;
14194         }
14195     }
14196 
14197     return rc;
14198 }
14199 
14200 /*===========================================================================
14201 * FUNCTION   : getFlashInfo
14202 *
14203 * DESCRIPTION: Retrieve information about whether the device has a flash.
14204 *
14205 * PARAMETERS :
14206 *   @cameraId  : Camera id to query
14207 *   @hasFlash  : Boolean indicating whether there is a flash device
14208 *                associated with given camera
14209 *   @flashNode : If a flash device exists, this will be its device node.
14210 *
14211 * RETURN     :
14212 *   None
14213 *==========================================================================*/
getFlashInfo(const int cameraId,bool & hasFlash,char (& flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])14214 void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
14215         bool& hasFlash,
14216         char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
14217 {
14218     cam_capability_t* camCapability = gCamCapability[cameraId];
14219     if (NULL == camCapability) {
14220         hasFlash = false;
14221         flashNode[0] = '\0';
14222     } else {
14223         hasFlash = camCapability->flash_available;
14224         strlcpy(flashNode,
14225                 (char*)camCapability->flash_dev_name,
14226                 QCAMERA_MAX_FILEPATH_LENGTH);
14227     }
14228 }
14229 
14230 /*===========================================================================
14231 * FUNCTION   : getEepromVersionInfo
14232 *
14233 * DESCRIPTION: Retrieve version info of the sensor EEPROM data
14234 *
14235 * PARAMETERS : None
14236 *
14237 * RETURN     : string describing EEPROM version
14238 *              "\0" if no such info available
14239 *==========================================================================*/
getEepromVersionInfo()14240 const char *QCamera3HardwareInterface::getEepromVersionInfo()
14241 {
14242     return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
14243 }
14244 
14245 /*===========================================================================
14246 * FUNCTION   : getLdafCalib
14247 *
14248 * DESCRIPTION: Retrieve Laser AF calibration data
14249 *
14250 * PARAMETERS : None
14251 *
14252 * RETURN     : Two uint32_t describing laser AF calibration data
14253 *              NULL if none is available.
14254 *==========================================================================*/
getLdafCalib()14255 const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14256 {
14257     if (mLdafCalibExist) {
14258         return &mLdafCalib[0];
14259     } else {
14260         return NULL;
14261     }
14262 }
14263 
14264 /*===========================================================================
14265 * FUNCTION   : getEaselFwVersion
14266 *
14267 * DESCRIPTION: Retrieve Easel firmware version
14268 *
14269 * PARAMETERS : None
14270 *
14271 * RETURN     : string describing Firmware version
14272 *              "\0" if version is not up to date
14273 *==========================================================================*/
getEaselFwVersion()14274 const char *QCamera3HardwareInterface::getEaselFwVersion()
14275 {
14276     if (mEaselFwUpdated) {
14277         return (const char *)&mEaselFwVersion[0];
14278     } else {
14279         return NULL;
14280     }
14281 }
14282 
/*===========================================================================
 * FUNCTION   : dynamicUpdateMetaStreamInfo
 *
 * DESCRIPTION: This function:
 *             (1) stops all the channels
 *             (2) returns error on pending requests and buffers
 *             (3) sends metastream_info in setparams
 *             (4) starts all channels
 *             This is useful when sensor has to be restarted to apply any
 *             settings such as frame rate from a different sensor mode
 *
 * PARAMETERS : None
 *
 * RETURN     : NO_ERROR on success
 *              Error codes on failure
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
    int rc = NO_ERROR;

    LOGD("E");

    // Step 1: stream-off everything before touching the sensor mode.
    rc = stopAllChannels();
    if (rc < 0) {
        LOGE("stopAllChannels failed");
        return rc;
    }

    // Step 2: flush in-flight work back to the framework as errors;
    // the restart invalidates anything still pending.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        return rc;
    }

    // Debug dump of the stream configuration about to be re-sent.
    // NOTE(review): "%x" may truncate the postprocess mask if
    // cam_feature_mask_t is 64-bit — confirm against its typedef.
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
                "Format:%d",
                mStreamConfigInfo.type[i],
                mStreamConfigInfo.stream_sizes[i].width,
                mStreamConfigInfo.stream_sizes[i].height,
                mStreamConfigInfo.postprocess_mask[i],
                mStreamConfigInfo.format[i]);
    }

    /* Send meta stream info once again so that ISP can start */
    // Step 3: a set_parms failure is logged but not fatal — the channels
    // are restarted regardless so the pipeline keeps running.
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        LOGE("set Metastreaminfo failed. Sensor mode does not change");
    }

    // Step 4: stream-on everything again.
    rc = startAllChannels();
    if (rc < 0) {
        LOGE("startAllChannels failed");
        return rc;
    }

    LOGD("X");
    return rc;
}
14347 
14348 /*===========================================================================
14349  * FUNCTION   : stopAllChannels
14350  *
14351  * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14352  *
14353  * PARAMETERS : None
14354  *
14355  * RETURN     : NO_ERROR on success
14356  *              Error codes on failure
14357  *
14358  *==========================================================================*/
stopAllChannels()14359 int32_t QCamera3HardwareInterface::stopAllChannels()
14360 {
14361     int32_t rc = NO_ERROR;
14362 
14363     LOGD("Stopping all channels");
14364     // Stop the Streams/Channels
14365     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14366         it != mStreamInfo.end(); it++) {
14367         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14368         if (channel) {
14369             channel->stop();
14370         }
14371         (*it)->status = INVALID;
14372     }
14373 
14374     if (mSupportChannel) {
14375         mSupportChannel->stop();
14376     }
14377     if (mAnalysisChannel) {
14378         mAnalysisChannel->stop();
14379     }
14380     if (mRawDumpChannel) {
14381         mRawDumpChannel->stop();
14382     }
14383     if (mHdrPlusRawSrcChannel) {
14384         mHdrPlusRawSrcChannel->stop();
14385     }
14386     if (mMetadataChannel) {
14387         /* If content of mStreamInfo is not 0, there is metadata stream */
14388         mMetadataChannel->stop();
14389     }
14390 
14391     LOGD("All channels stopped");
14392     return rc;
14393 }
14394 
14395 /*===========================================================================
14396  * FUNCTION   : startAllChannels
14397  *
14398  * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14399  *
14400  * PARAMETERS : None
14401  *
14402  * RETURN     : NO_ERROR on success
14403  *              Error codes on failure
14404  *
14405  *==========================================================================*/
startAllChannels()14406 int32_t QCamera3HardwareInterface::startAllChannels()
14407 {
14408     int32_t rc = NO_ERROR;
14409 
14410     LOGD("Start all channels ");
14411     // Start the Streams/Channels
14412     if (mMetadataChannel) {
14413         /* If content of mStreamInfo is not 0, there is metadata stream */
14414         rc = mMetadataChannel->start();
14415         if (rc < 0) {
14416             LOGE("META channel start failed");
14417             return rc;
14418         }
14419     }
14420     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14421         it != mStreamInfo.end(); it++) {
14422         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14423         if (channel) {
14424             rc = channel->start();
14425             if (rc < 0) {
14426                 LOGE("channel start failed");
14427                 return rc;
14428             }
14429         }
14430     }
14431     if (mAnalysisChannel) {
14432         mAnalysisChannel->start();
14433     }
14434     if (mSupportChannel) {
14435         rc = mSupportChannel->start();
14436         if (rc < 0) {
14437             LOGE("Support channel start failed");
14438             return rc;
14439         }
14440     }
14441     if (mRawDumpChannel) {
14442         rc = mRawDumpChannel->start();
14443         if (rc < 0) {
14444             LOGE("RAW dump channel start failed");
14445             return rc;
14446         }
14447     }
14448     if (mHdrPlusRawSrcChannel) {
14449         rc = mHdrPlusRawSrcChannel->start();
14450         if (rc < 0) {
14451             LOGE("HDR+ RAW channel start failed");
14452             return rc;
14453         }
14454     }
14455 
14456     LOGD("All channels started");
14457     return rc;
14458 }
14459 
/*===========================================================================
 * FUNCTION   : notifyErrorForPendingRequests
 *
 * DESCRIPTION: This function sends error for all the pending requests/buffers
 *
 * PARAMETERS : None
 *
 * RETURN     : Error codes
 *              NO_ERROR on success
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    // Flush any depth data still pending on the depth channel first.
    notifyErrorFoPendingDepthData(mDepthChannel);

    auto pendingRequest = mPendingRequestsList.begin();
    auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();

    // Iterate through pending requests (for which result metadata isn't sent yet) and pending
    // buffers (for which buffers aren't sent yet).
    //
    // The two cursors are advanced in a merge-style walk (this assumes both
    // lists are ordered by frame number) so each frame is classified once:
    //   - only buffers pending  -> ERROR_BUFFER per buffer
    //   - only metadata pending -> ERROR_RESULT
    //   - both pending          -> ERROR_REQUEST + buffers returned in error
    while (pendingRequest != mPendingRequestsList.end() ||
           pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingRequest == mPendingRequestsList.end() ||
            pendingBuffer->frame_number < pendingRequest->frame_number) {
            // If metadata for this frame was sent, notify about a buffer error and returns buffers
            // with error.
            for (auto &info : pendingBuffer->mPendingBufferList) {
                // Send a buffer error for this frame number.
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info.stream;
                notify_msg.message.error.frame_number = pendingBuffer->frame_number;
                orchestrateNotify(&notify_msg);

                // Return the buffer to the framework with ERROR status.
                camera3_stream_buffer_t buffer = {};
                buffer.acquire_fence = -1;
                buffer.release_fence = -1;
                buffer.buffer = info.buffer;
                buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
                buffer.stream = info.stream;
                mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
            }

            pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
        } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
                   pendingBuffer->frame_number > pendingRequest->frame_number) {
            // If the buffers for this frame were sent already, notify about a result error.
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
            notify_msg.message.error.error_stream = nullptr;
            notify_msg.message.error.frame_number = pendingRequest->frame_number;
            orchestrateNotify(&notify_msg);

            // A reprocess request still owes the framework its input buffer.
            if (pendingRequest->input_buffer != nullptr) {
                camera3_capture_result result = {};
                result.frame_number = pendingRequest->frame_number;
                result.result = nullptr;
                result.input_buffer = pendingRequest->input_buffer;
                orchestrateResult(&result);
            }

            mShutterDispatcher.clear(pendingRequest->frame_number);
            pendingRequest = mPendingRequestsList.erase(pendingRequest);
        } else {
            // If both buffers and result metadata weren't sent yet, notify about a request error
            // and return buffers with error.
            for (auto &info : pendingBuffer->mPendingBufferList) {
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
                notify_msg.message.error.error_stream = info.stream;
                notify_msg.message.error.frame_number = pendingBuffer->frame_number;
                orchestrateNotify(&notify_msg);

                // Return the buffer to the framework with ERROR status.
                camera3_stream_buffer_t buffer = {};
                buffer.acquire_fence = -1;
                buffer.release_fence = -1;
                buffer.buffer = info.buffer;
                buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
                buffer.stream = info.stream;
                mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
            }

            // Also return the input buffer for reprocess requests.
            if (pendingRequest->input_buffer != nullptr) {
                camera3_capture_result result = {};
                result.frame_number = pendingRequest->frame_number;
                result.result = nullptr;
                result.input_buffer = pendingRequest->input_buffer;
                orchestrateResult(&result);
            }

            mShutterDispatcher.clear(pendingRequest->frame_number);
            pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
            pendingRequest = mPendingRequestsList.erase(pendingRequest);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();
    mShutterDispatcher.clear();
    mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mExpectedFrameDuration = 0;
    mExpectedInflightDuration = 0;
    LOGH("Cleared all the pending buffers ");

    return NO_ERROR;
}
14573 
isOnEncoder(const cam_dimension_t max_viewfinder_size,uint32_t width,uint32_t height)14574 bool QCamera3HardwareInterface::isOnEncoder(
14575         const cam_dimension_t max_viewfinder_size,
14576         uint32_t width, uint32_t height)
14577 {
14578     return ((width > (uint32_t)max_viewfinder_size.width) ||
14579             (height > (uint32_t)max_viewfinder_size.height) ||
14580             (width > (uint32_t)VIDEO_4K_WIDTH) ||
14581             (height > (uint32_t)VIDEO_4K_HEIGHT));
14582 }
14583 
14584 /*===========================================================================
14585  * FUNCTION   : setBundleInfo
14586  *
14587  * DESCRIPTION: Set bundle info for all streams that are bundle.
14588  *
14589  * PARAMETERS : None
14590  *
14591  * RETURN     : NO_ERROR on success
14592  *              Error codes on failure
14593  *==========================================================================*/
setBundleInfo()14594 int32_t QCamera3HardwareInterface::setBundleInfo()
14595 {
14596     int32_t rc = NO_ERROR;
14597 
14598     if (mChannelHandle) {
14599         cam_bundle_config_t bundleInfo;
14600         memset(&bundleInfo, 0, sizeof(bundleInfo));
14601         rc = mCameraHandle->ops->get_bundle_info(
14602                 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14603         if (rc != NO_ERROR) {
14604             LOGE("get_bundle_info failed");
14605             return rc;
14606         }
14607         if (mAnalysisChannel) {
14608             mAnalysisChannel->setBundleInfo(bundleInfo);
14609         }
14610         if (mSupportChannel) {
14611             mSupportChannel->setBundleInfo(bundleInfo);
14612         }
14613         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14614                 it != mStreamInfo.end(); it++) {
14615             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14616             channel->setBundleInfo(bundleInfo);
14617         }
14618         if (mRawDumpChannel) {
14619             mRawDumpChannel->setBundleInfo(bundleInfo);
14620         }
14621         if (mHdrPlusRawSrcChannel) {
14622             mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14623         }
14624     }
14625 
14626     return rc;
14627 }
14628 
/*===========================================================================
 * FUNCTION   : setInstantAEC
 *
 * DESCRIPTION: Set Instant AEC related params.
 *
 * PARAMETERS :
 *      @meta: CameraMetadata reference
 *
 * RETURN     : NO_ERROR on success
 *              Error codes on failure
 *==========================================================================*/
int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
{
    int32_t rc = NO_ERROR;
    uint8_t val = 0;
    char prop[PROPERTY_VALUE_MAX];

    // First try to configure instant AEC from framework metadata
    if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
        val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
    }

    // If framework did not set this value, try to read from set prop.
    // NOTE(review): a framework value of 0 is indistinguishable from
    // "not set" here, so 0 always falls through to the property.
    if (val == 0) {
        memset(prop, 0, sizeof(prop));
        property_get("persist.camera.instant.aec", prop, "0");
        val = (uint8_t)atoi(prop);
    }

    // Accept only values inside the AEC convergence enum range
    // [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX).
    if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
           ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
        // Cache the mode and reset the per-session instant-AEC bookkeeping.
        mInstantAEC = val;
        mInstantAECSettledFrameNumber = 0;
        mInstantAecFrameIdxCount = 0;
        LOGH("instantAEC value set %d",val);
        if (mInstantAEC) {
            // Number of initial frames to skip for display while AEC settles;
            // configurable via property, defaulting to 10.
            memset(prop, 0, sizeof(prop));
            property_get("persist.camera.ae.instant.bound", prop, "10");
            int32_t aec_frame_skip_cnt = atoi(prop);
            if (aec_frame_skip_cnt >= 0) {
                mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
            } else {
                LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
                rc = BAD_VALUE;
            }
        }
    } else {
        LOGE("Bad instant aec value set %d", val);
        rc = BAD_VALUE;
    }
    return rc;
}
14682 
14683 /*===========================================================================
14684  * FUNCTION   : get_num_overall_buffers
14685  *
14686  * DESCRIPTION: Estimate number of pending buffers across all requests.
14687  *
14688  * PARAMETERS : None
14689  *
14690  * RETURN     : Number of overall pending buffers
14691  *
14692  *==========================================================================*/
get_num_overall_buffers()14693 uint32_t PendingBuffersMap::get_num_overall_buffers()
14694 {
14695     uint32_t sum_buffers = 0;
14696     for (auto &req : mPendingBuffersInRequest) {
14697         sum_buffers += req.mPendingBufferList.size();
14698     }
14699     return sum_buffers;
14700 }
14701 
/*===========================================================================
 * FUNCTION   : removeBuf
 *
 * DESCRIPTION: Remove a matching buffer from tracker.
 *
 * PARAMETERS : @buffer: image buffer for the callback
 *
 * RETURN     : None
 *
 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    // Scan every pending request for the first entry holding this handle;
    // only the first match is removed.
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    // (safe: both loops break out immediately below, so the
                    // erased iterators are never advanced afterwards).
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
14738 
14739 /*===========================================================================
14740  * FUNCTION   : getBufErrStatus
14741  *
14742  * DESCRIPTION: get buffer error status
14743  *
14744  * PARAMETERS : @buffer: buffer handle
14745  *
14746  * RETURN     : Error status
14747  *
14748  *==========================================================================*/
getBufErrStatus(buffer_handle_t * buffer)14749 int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14750 {
14751     for (auto& req : mPendingBuffersInRequest) {
14752         for (auto& k : req.mPendingBufferList) {
14753             if (k.buffer == buffer)
14754                 return k.bufStatus;
14755         }
14756     }
14757     return CAMERA3_BUFFER_STATUS_OK;
14758 }
14759 
14760 /*===========================================================================
14761  * FUNCTION   : setPAAFSupport
14762  *
14763  * DESCRIPTION: Set the preview-assisted auto focus support bit in
14764  *              feature mask according to stream type and filter
14765  *              arrangement
14766  *
14767  * PARAMETERS : @feature_mask: current feature mask, which may be modified
14768  *              @stream_type: stream type
14769  *              @filter_arrangement: filter arrangement
14770  *
14771  * RETURN     : None
14772  *==========================================================================*/
setPAAFSupport(cam_feature_mask_t & feature_mask,cam_stream_type_t stream_type,cam_color_filter_arrangement_t filter_arrangement)14773 void QCamera3HardwareInterface::setPAAFSupport(
14774         cam_feature_mask_t& feature_mask,
14775         cam_stream_type_t stream_type,
14776         cam_color_filter_arrangement_t filter_arrangement)
14777 {
14778     switch (filter_arrangement) {
14779     case CAM_FILTER_ARRANGEMENT_RGGB:
14780     case CAM_FILTER_ARRANGEMENT_GRBG:
14781     case CAM_FILTER_ARRANGEMENT_GBRG:
14782     case CAM_FILTER_ARRANGEMENT_BGGR:
14783         if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14784                 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
14785                 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
14786             if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14787                 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14788         }
14789         break;
14790     case CAM_FILTER_ARRANGEMENT_Y:
14791         if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14792             feature_mask |= CAM_QCOM_FEATURE_PAAF;
14793         }
14794         break;
14795     default:
14796         break;
14797     }
14798     LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14799             feature_mask, stream_type, filter_arrangement);
14800 
14801 
14802 }
14803 
14804 /*===========================================================================
14805 * FUNCTION   : getSensorMountAngle
14806 *
14807 * DESCRIPTION: Retrieve sensor mount angle
14808 *
14809 * PARAMETERS : None
14810 *
14811 * RETURN     : sensor mount angle in uint32_t
14812 *==========================================================================*/
getSensorMountAngle()14813 uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14814 {
14815     return gCamCapability[mCameraId]->sensor_mount_angle;
14816 }
14817 
14818 /*===========================================================================
14819 * FUNCTION   : getRelatedCalibrationData
14820 *
14821 * DESCRIPTION: Retrieve related system calibration data
14822 *
14823 * PARAMETERS : None
14824 *
14825 * RETURN     : Pointer of related system calibration data
14826 *==========================================================================*/
getRelatedCalibrationData()14827 const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14828 {
14829     return (const cam_related_system_calibration_data_t *)
14830             &(gCamCapability[mCameraId]->related_cam_calibration);
14831 }
14832 
14833 /*===========================================================================
14834  * FUNCTION   : is60HzZone
14835  *
14836  * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
14837  *
14838  * PARAMETERS : None
14839  *
14840  * RETURN     : True if in 60Hz zone, False otherwise
14841  *==========================================================================*/
is60HzZone()14842 bool QCamera3HardwareInterface::is60HzZone()
14843 {
14844     time_t t = time(NULL);
14845     struct tm lt;
14846 
14847     struct tm* r = localtime_r(&t, &lt);
14848 
14849     if (r == NULL || lt.tm_gmtoff <=  -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14850         return true;
14851     else
14852         return false;
14853 }
14854 
14855 /*===========================================================================
14856  * FUNCTION   : adjustBlackLevelForCFA
14857  *
14858  * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14859  *              of bayer CFA (Color Filter Array).
14860  *
14861  * PARAMETERS : @input: black level pattern in the order of RGGB
14862  *              @output: black level pattern in the order of CFA
14863  *              @color_arrangement: CFA color arrangement
14864  *
14865  * RETURN     : None
14866  *==========================================================================*/
14867 template<typename T>
adjustBlackLevelForCFA(T input[BLACK_LEVEL_PATTERN_CNT],T output[BLACK_LEVEL_PATTERN_CNT],cam_color_filter_arrangement_t color_arrangement)14868 void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14869         T input[BLACK_LEVEL_PATTERN_CNT],
14870         T output[BLACK_LEVEL_PATTERN_CNT],
14871         cam_color_filter_arrangement_t color_arrangement)
14872 {
14873     switch (color_arrangement) {
14874     case CAM_FILTER_ARRANGEMENT_GRBG:
14875         output[0] = input[1];
14876         output[1] = input[0];
14877         output[2] = input[3];
14878         output[3] = input[2];
14879         break;
14880     case CAM_FILTER_ARRANGEMENT_GBRG:
14881         output[0] = input[2];
14882         output[1] = input[3];
14883         output[2] = input[0];
14884         output[3] = input[1];
14885         break;
14886     case CAM_FILTER_ARRANGEMENT_BGGR:
14887         output[0] = input[3];
14888         output[1] = input[2];
14889         output[2] = input[1];
14890         output[3] = input[0];
14891         break;
14892     case CAM_FILTER_ARRANGEMENT_RGGB:
14893         output[0] = input[0];
14894         output[1] = input[1];
14895         output[2] = input[2];
14896         output[3] = input[3];
14897         break;
14898     default:
14899         LOGE("Invalid color arrangement to derive dynamic blacklevel");
14900         break;
14901     }
14902 }
14903 
/*===========================================================================
 * FUNCTION   : updateHdrPlusResultMetadata
 *
 * DESCRIPTION: Copy the JPEG/GPS/intent entries from the HDR+ request
 *              settings into the result metadata. Each tag that is present
 *              in @settings is written into @resultMetadata; each tag that
 *              is absent is erased from @resultMetadata so stale values from
 *              a previous result cannot leak through.
 *
 * PARAMETERS : @resultMetadata: result metadata to update in place
 *              @settings: per-request settings captured when the HDR+
 *                         request was submitted; may be null (no-op)
 *
 * RETURN     : None
 *==========================================================================*/
void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
    CameraMetadata &resultMetadata,
    std::shared_ptr<metadata_buffer_t> settings)
{
    if (settings == nullptr) {
        ALOGE("%s: settings is nullptr.", __FUNCTION__);
        return;
    }

    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    } else {
        resultMetadata.erase(ANDROID_JPEG_GPS_COORDINATES);
    }

    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
        // Stored as raw bytes; reinterpret as a C string for the framework tag.
        String8 str((const char *)gps_methods);
        resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    } else {
        resultMetadata.erase(ANDROID_JPEG_GPS_PROCESSING_METHOD);
    }

    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_GPS_TIMESTAMP);
    }

    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
        resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_ORIENTATION);
    }

    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
        // HAL stores quality as uint32_t; framework tag is uint8_t.
        uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
        resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_QUALITY);
    }

    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
        // Same uint32 -> uint8 narrowing as JPEG quality above.
        uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    } else {
        resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_QUALITY);
    }

    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
        // Framework expects [width, height] as an int32 pair.
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    } else {
        resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_SIZE);
    }

    IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
        uint8_t fwk_intent = intent[0];
        resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
    } else {
        resultMetadata.erase(ANDROID_CONTROL_CAPTURE_INTENT);
    }
}
14968 
isRequestHdrPlusCompatible(const camera3_capture_request_t & request,const CameraMetadata & metadata)14969 bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
14970         const camera3_capture_request_t &request, const CameraMetadata &metadata) {
14971     if (metadata.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
14972             metadata.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 1) {
14973         ALOGV("%s: NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS is 1", __FUNCTION__);
14974         return false;
14975     }
14976 
14977     if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14978          metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14979             ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
14980         ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14981                 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
14982         return false;
14983     }
14984 
14985     if (!metadata.exists(ANDROID_EDGE_MODE) ||
14986             metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14987         ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14988         return false;
14989     }
14990 
14991     if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
14992             metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
14993                     ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
14994         ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
14995         return false;
14996     }
14997 
14998     if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
14999             (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
15000              metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
15001                     ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
15002         ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
15003         return false;
15004     }
15005 
15006     if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
15007             metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
15008         ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
15009         return false;
15010     }
15011 
15012     if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
15013             metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
15014                     ANDROID_CONTROL_EFFECT_MODE_OFF) {
15015         ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE_OFF is not OFF.", __FUNCTION__);
15016         return false;
15017     }
15018 
15019     if (!metadata.exists(ANDROID_CONTROL_MODE) ||
15020             (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
15021              metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
15022                     ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
15023         ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
15024         return false;
15025     }
15026 
15027     // TODO (b/66500626): support AE compensation.
15028     if (!metadata.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION) ||
15029             metadata.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0] != 0) {
15030         ALOGV("%s: ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION is not 0.", __FUNCTION__);
15031         return false;
15032     }
15033 
15034     // TODO (b/32585046): support non-ZSL.
15035     if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
15036          metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
15037         ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
15038         return false;
15039     }
15040 
15041     // TODO (b/32586081): support flash.
15042     if (!metadata.exists(ANDROID_FLASH_MODE) ||
15043          metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
15044         ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
15045         return false;
15046     }
15047 
15048     if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
15049          metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
15050         ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
15051         return false;
15052     }
15053 
15054     switch (request.output_buffers[0].stream->format) {
15055         case HAL_PIXEL_FORMAT_BLOB:
15056         case HAL_PIXEL_FORMAT_YCbCr_420_888:
15057         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15058             break;
15059         default:
15060             ALOGV("%s: Not an HDR+ request: Only Jpeg and YUV output is supported.", __FUNCTION__);
15061             for (uint32_t i = 0; i < request.num_output_buffers; i++) {
15062                 ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
15063                         request.output_buffers[0].stream->width,
15064                         request.output_buffers[0].stream->height,
15065                         request.output_buffers[0].stream->format);
15066             }
15067             return false;
15068     }
15069 
15070     return true;
15071 }
15072 
abortPendingHdrplusRequest(HdrPlusPendingRequest * hdrPlusRequest)15073 void QCamera3HardwareInterface::abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest) {
15074     if (hdrPlusRequest == nullptr) return;
15075 
15076     for (auto & outputBufferIter : hdrPlusRequest->outputBuffers) {
15077         // Find the stream for this buffer.
15078         for (auto streamInfo : mStreamInfo) {
15079             if (streamInfo->id == outputBufferIter.first) {
15080                 if (streamInfo->channel == mPictureChannel) {
15081                     // For picture channel, this buffer is internally allocated so return this
15082                     // buffer to picture channel.
15083                     mPictureChannel->returnYuvBuffer(outputBufferIter.second.get());
15084                 } else {
15085                     // Unregister this buffer for other channels.
15086                     streamInfo->channel->unregisterBuffer(outputBufferIter.second.get());
15087                 }
15088                 break;
15089             }
15090         }
15091     }
15092 
15093     hdrPlusRequest->outputBuffers.clear();
15094     hdrPlusRequest->frameworkOutputBuffers.clear();
15095 }
15096 
/*===========================================================================
 * FUNCTION   : trySubmittingHdrPlusRequestLocked
 *
 * DESCRIPTION: Attempt to submit a capture request to the HDR+ service.
 *              For each requested output, a buffer is acquired (from the pic
 *              channel for JPEG, or registered from the framework buffer for
 *              YUV) and added to the HDR+ request. On any failure, buffers
 *              acquired so far are returned via abortPendingHdrplusRequest().
 *              Must be called with gHdrPlusClientLock held.
 *
 * PARAMETERS : @hdrPlusRequest: output; filled with the per-stream buffers
 *              @request: framework capture request
 *              @metadata: the request's settings
 *
 * RETURN     : true if the request was submitted to the HDR+ service,
 *              false otherwise (caller should process it as a normal request)
 *==========================================================================*/
bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
        HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
        const CameraMetadata &metadata)
{
    if (hdrPlusRequest == nullptr) return false;
    if (!isRequestHdrPlusCompatible(request, metadata)) return false;

    status_t res = OK;
    pbcamera::CaptureRequest pbRequest;
    pbRequest.id = request.frame_number;
    // Iterate through all requested output buffers and add them to an HDR+ request.
    for (uint32_t i = 0; i < request.num_output_buffers; i++) {
        // Find the index of the stream in mStreamInfo.
        uint32_t pbStreamId = 0;
        bool found = false;
        for (auto streamInfo : mStreamInfo) {
            if (streamInfo->stream == request.output_buffers[i].stream) {
                pbStreamId = streamInfo->id;
                found = true;
                break;
            }
        }

        if (!found) {
            ALOGE("%s: requested stream was not configured.", __FUNCTION__);
            abortPendingHdrplusRequest(hdrPlusRequest);
            return false;
        }
        auto outBuffer = std::make_shared<mm_camera_buf_def_t>();
        switch (request.output_buffers[i].stream->format) {
            case HAL_PIXEL_FORMAT_BLOB:
            {
                // For jpeg output, get a YUV buffer from pic channel.
                QCamera3PicChannel *picChannel =
                        (QCamera3PicChannel*)request.output_buffers[i].stream->priv;
                res = picChannel->getYuvBufferForRequest(outBuffer.get(), request.frame_number);
                if (res != OK) {
                    ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
                            __FUNCTION__, strerror(-res), res);
                    abortPendingHdrplusRequest(hdrPlusRequest);
                    return false;
                }
                break;
            }
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
            {
                // For YUV output, register the buffer and get the buffer def from the channel.
                QCamera3ProcessingChannel *channel =
                        (QCamera3ProcessingChannel*)request.output_buffers[i].stream->priv;
                res = channel->registerBufferAndGetBufDef(request.output_buffers[i].buffer,
                        outBuffer.get());
                if (res != OK) {
                    ALOGE("%s: Getting the buffer def failed: %s (%d)", __FUNCTION__,
                            strerror(-res), res);
                    abortPendingHdrplusRequest(hdrPlusRequest);
                    return false;
                }
                break;
            }
            default:
                // Unsupported format (should have been rejected by the
                // compatibility check above).
                abortPendingHdrplusRequest(hdrPlusRequest);
                return false;
        }

        pbcamera::StreamBuffer buffer;
        buffer.streamId = pbStreamId;
        buffer.dmaBufFd = outBuffer->fd;
        // Pass a CPU pointer only when there is no dma-buf fd to share.
        buffer.data = outBuffer->fd == -1 ? outBuffer->buffer : nullptr;
        buffer.dataSize = outBuffer->frame_len;

        pbRequest.outputBuffers.push_back(buffer);

        // Track both the HAL buffer and the original framework buffer so the
        // result (or an abort) can route them back correctly.
        hdrPlusRequest->outputBuffers.emplace(pbStreamId, outBuffer);
        hdrPlusRequest->frameworkOutputBuffers.emplace(pbStreamId, request.output_buffers[i]);
    }

    // Submit an HDR+ capture request to HDR+ service.
    res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
    if (res != OK) {
        ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
                strerror(-res), res);
        abortPendingHdrplusRequest(hdrPlusRequest);
        return false;
    }

    return true;
}
15185 
openHdrPlusClientAsyncLocked()15186 status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
15187 {
15188     if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
15189         return OK;
15190     }
15191 
15192     status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
15193     if (res != OK) {
15194         ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
15195                 strerror(-res), res);
15196         return res;
15197     }
15198     gHdrPlusClientOpening = true;
15199 
15200     return OK;
15201 }
15202 
enableHdrPlusModeLocked()15203 status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
15204 {
15205     status_t res;
15206 
15207     if (mHdrPlusModeEnabled) {
15208         return OK;
15209     }
15210 
15211     // Check if gHdrPlusClient is opened or being opened.
15212     if (gHdrPlusClient == nullptr) {
15213         if (gHdrPlusClientOpening) {
15214             // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
15215             return OK;
15216         }
15217 
15218         res = openHdrPlusClientAsyncLocked();
15219         if (res != OK) {
15220             ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
15221                     strerror(-res), res);
15222             return res;
15223         }
15224 
15225         // When opening HDR+ client completes, HDR+ mode will be enabled.
15226         return OK;
15227 
15228     }
15229 
15230     // Configure stream for HDR+.
15231     res = configureHdrPlusStreamsLocked();
15232     if (res != OK) {
15233         LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
15234         return res;
15235     }
15236 
15237     // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
15238     res = gHdrPlusClient->setZslHdrPlusMode(true);
15239     if (res != OK) {
15240         LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
15241         return res;
15242     }
15243 
15244     mHdrPlusModeEnabled = true;
15245     ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
15246 
15247     return OK;
15248 }
15249 
finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> & lock)15250 void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
15251 {
15252     if (gHdrPlusClientOpening) {
15253         gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
15254     }
15255 }
15256 
disableHdrPlusModeLocked()15257 void QCamera3HardwareInterface::disableHdrPlusModeLocked()
15258 {
15259     // Disable HDR+ mode.
15260     if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
15261         status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
15262         if (res != OK) {
15263             ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
15264         }
15265 
15266         // Close HDR+ client so Easel can enter low power mode.
15267         gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
15268         gHdrPlusClient = nullptr;
15269     }
15270 
15271     mHdrPlusModeEnabled = false;
15272     ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
15273 }
15274 
isSessionHdrPlusModeCompatible()15275 bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
15276 {
15277     // Check that at least one YUV or one JPEG output is configured.
15278     // TODO: Support RAW (b/36690506)
15279     for (auto streamInfo : mStreamInfo) {
15280         if (streamInfo != nullptr && streamInfo->stream != nullptr) {
15281             if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
15282                     (streamInfo->stream->format == HAL_PIXEL_FORMAT_BLOB ||
15283                      streamInfo->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15284                      streamInfo->stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
15285                 return true;
15286             }
15287         }
15288     }
15289 
15290     return false;
15291 }
15292 
configureHdrPlusStreamsLocked()15293 status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
15294 {
15295     pbcamera::InputConfiguration inputConfig;
15296     std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
15297     status_t res = OK;
15298 
15299     // Sensor MIPI will send data to Easel.
15300     inputConfig.isSensorInput = true;
15301     inputConfig.sensorMode.cameraId = mCameraId;
15302     inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
15303     inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
15304     inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
15305     inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
15306     inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
15307     inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
15308     inputConfig.sensorMode.timestampCropOffsetNs = mSensorModeInfo.timestamp_crop_offset;
15309 
15310     if (mSensorModeInfo.num_raw_bits != 10) {
15311         ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
15312                 mSensorModeInfo.num_raw_bits);
15313         return BAD_VALUE;
15314     }
15315 
15316     inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
15317 
15318     // Iterate through configured output streams in HAL and configure those streams in HDR+
15319     // service.
15320     for (auto streamInfo : mStreamInfo) {
15321         pbcamera::StreamConfiguration outputConfig;
15322         if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT) {
15323             switch (streamInfo->stream->format) {
15324                 case HAL_PIXEL_FORMAT_BLOB:
15325                 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15326                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15327                     res = fillPbStreamConfig(&outputConfig, streamInfo->id,
15328                             streamInfo->channel, /*stream index*/0);
15329                     if (res != OK) {
15330                         LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
15331                             __FUNCTION__, strerror(-res), res);
15332 
15333                         return res;
15334                     }
15335 
15336                     outputStreamConfigs.push_back(outputConfig);
15337                     break;
15338                 default:
15339                     // TODO: handle RAW16 outputs if mRawChannel was created. (b/36690506)
15340                     break;
15341             }
15342         }
15343     }
15344 
15345     res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
15346     if (res != OK) {
15347         LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
15348             strerror(-res), res);
15349         return res;
15350     }
15351 
15352     return OK;
15353 }
15354 
/*===========================================================================
 * FUNCTION   : handleEaselFatalError
 *
 * DESCRIPTION: Handle a fatal Easel error: move the HAL to the ERROR state
 *              and notify the framework of a device error, stopping channels
 *              immediately since Easel can no longer deliver frames.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/
void QCamera3HardwareInterface::handleEaselFatalError()
{
    // Update HAL state under the main mutex before reporting the error.
    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    handleCameraDeviceError(/*stopChannelImmediately*/true);
}
15363 
handleEaselFatalErrorAsync()15364 void QCamera3HardwareInterface::handleEaselFatalErrorAsync()
15365 {
15366     if (mEaselErrorFuture.valid()) {
15367         // The error future has been invoked.
15368         return;
15369     }
15370 
15371     // Launch a future to handle the fatal error.
15372     mEaselErrorFuture = std::async(std::launch::async,
15373             &QCamera3HardwareInterface::handleEaselFatalError, this);
15374 }
15375 
/*===========================================================================
 * FUNCTION   : onEaselFatalError
 *
 * DESCRIPTION: Easel manager callback invoked on a fatal Easel error; logs
 *              the message and delegates to the async error handler.
 *
 * PARAMETERS : @errMsg: human-readable error description from Easel
 *
 * RETURN     : None
 *==========================================================================*/
void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
{
    ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
    handleEaselFatalErrorAsync();
}
15381 
/*===========================================================================
 * FUNCTION   : onOpened
 *
 * DESCRIPTION: Callback invoked when the asynchronous HDR+ client open
 *              completes. Takes ownership of the client, pushes static
 *              metadata to it, enables HDR+ mode, and queries the Easel
 *              firmware version.
 *
 * PARAMETERS : @client: the newly opened HDR+ client (ownership transferred)
 *
 * RETURN     : None
 *==========================================================================*/
void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
{
    int rc = NO_ERROR;

    if (client == nullptr) {
        ALOGE("%s: Opened client is null.", __FUNCTION__);
        return;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
    ALOGI("%s: HDR+ client opened.", __FUNCTION__);

    std::unique_lock<std::mutex> l(gHdrPlusClientLock);
    if (!gHdrPlusClientOpening) {
        // HDR+ was torn down while the open was in flight; drop the client.
        ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
        return;
    }

    // Publish the client and wake anyone blocked in
    // finishHdrPlusClientOpeningLocked().
    gHdrPlusClient = std::move(client);
    gHdrPlusClientOpening = false;
    gHdrPlusClientOpenCond.notify_one();

    // Set static metadata.
    status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
    if (res != OK) {
        LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
            __FUNCTION__, strerror(-res), res);
        gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
        return;
    }

    // Enable HDR+ mode.
    res = enableHdrPlusModeLocked();
    if (res != OK) {
        LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
    }

    // Get Easel firmware version
    if (EaselManagerClientOpened) {
        rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
        if (rc != OK) {
            ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
        } else {
            mEaselFwUpdated = true;
        }
    }
}
15430 
onOpenFailed(status_t err)15431 void QCamera3HardwareInterface::onOpenFailed(status_t err)
15432 {
15433     ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
15434     std::unique_lock<std::mutex> l(gHdrPlusClientLock);
15435     gHdrPlusClientOpening = false;
15436     gHdrPlusClientOpenCond.notify_one();
15437 }
15438 
/*===========================================================================
 * FUNCTION   : onFatalError
 *
 * DESCRIPTION: HDR+ client callback invoked on a fatal client error;
 *              delegates to the async Easel error handler.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/
void QCamera3HardwareInterface::onFatalError()
{
    ALOGE("%s: HDR+ client encountered a fatal error.", __FUNCTION__);
    handleEaselFatalErrorAsync();
}
15444 
/*===========================================================================
 * FUNCTION   : onShutter
 *
 * DESCRIPTION: HDR+ client callback for a shutter event; forwards the
 *              timestamp to the shutter dispatcher so the framework shutter
 *              notification can be sent in order.
 *
 * PARAMETERS : @requestId: frame number of the HDR+ request
 *              @apSensorTimestampNs: sensor timestamp in AP clock domain (ns)
 *
 * RETURN     : None
 *==========================================================================*/
void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
{
    ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
            __LINE__, requestId, apSensorTimestampNs);

    mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
}
15452 
onNextCaptureReady(uint32_t requestId)15453 void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
15454 {
15455     pthread_mutex_lock(&mMutex);
15456 
15457     // Find the pending request for this result metadata.
15458     auto requestIter = mPendingRequestsList.begin();
15459     while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15460         requestIter++;
15461     }
15462 
15463     if (requestIter == mPendingRequestsList.end()) {
15464         ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15465         pthread_mutex_unlock(&mMutex);
15466         return;
15467     }
15468 
15469     requestIter->partial_result_cnt++;
15470 
15471     CameraMetadata metadata;
15472     uint8_t ready = true;
15473     metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
15474 
15475     // Send it to framework.
15476     camera3_capture_result_t result = {};
15477 
15478     result.result = metadata.getAndLock();
15479     // Populate metadata result
15480     result.frame_number = requestId;
15481     result.num_output_buffers = 0;
15482     result.output_buffers = NULL;
15483     result.partial_result = requestIter->partial_result_cnt;
15484 
15485     orchestrateResult(&result);
15486     metadata.unlock(result.result);
15487 
15488     pthread_mutex_unlock(&mMutex);
15489 }
15490 
onPostview(uint32_t requestId,std::unique_ptr<std::vector<uint8_t>> postview,uint32_t width,uint32_t height,uint32_t stride,int32_t format)15491 void QCamera3HardwareInterface::onPostview(uint32_t requestId,
15492         std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
15493         uint32_t stride, int32_t format)
15494 {
15495     if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
15496         ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
15497                 __LINE__, width, height, requestId);
15498         char buf[FILENAME_MAX] = {};
15499         snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
15500                 requestId, width, height);
15501 
15502         pbcamera::StreamConfiguration config = {};
15503         config.image.width = width;
15504         config.image.height = height;
15505         config.image.format = format;
15506 
15507         pbcamera::PlaneConfiguration plane = {};
15508         plane.stride = stride;
15509         plane.scanline = height;
15510 
15511         config.image.planes.push_back(plane);
15512 
15513         pbcamera::StreamBuffer buffer = {};
15514         buffer.streamId = 0;
15515         buffer.dmaBufFd = -1;
15516         buffer.data = postview->data();
15517         buffer.dataSize = postview->size();
15518 
15519         hdrplus_client_utils::writePpm(buf, config, buffer);
15520     }
15521 
15522     pthread_mutex_lock(&mMutex);
15523 
15524     // Find the pending request for this result metadata.
15525     auto requestIter = mPendingRequestsList.begin();
15526     while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15527         requestIter++;
15528     }
15529 
15530     if (requestIter == mPendingRequestsList.end()) {
15531         ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15532         pthread_mutex_unlock(&mMutex);
15533         return;
15534     }
15535 
15536     requestIter->partial_result_cnt++;
15537 
15538     CameraMetadata metadata;
15539     int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
15540             static_cast<int32_t>(stride)};
15541     metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
15542     metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());
15543 
15544     // Send it to framework.
15545     camera3_capture_result_t result = {};
15546 
15547     result.result = metadata.getAndLock();
15548     // Populate metadata result
15549     result.frame_number = requestId;
15550     result.num_output_buffers = 0;
15551     result.output_buffers = NULL;
15552     result.partial_result = requestIter->partial_result_cnt;
15553 
15554     orchestrateResult(&result);
15555     metadata.unlock(result.result);
15556 
15557     pthread_mutex_unlock(&mMutex);
15558 }
15559 
onCaptureResult(pbcamera::CaptureResult * result,const camera_metadata_t & resultMetadata)15560 void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
15561         const camera_metadata_t &resultMetadata)
15562 {
15563     if (result == nullptr) {
15564         ALOGE("%s: result is nullptr.", __FUNCTION__);
15565         return;
15566     }
15567 
15568     // Find the pending HDR+ request.
15569     HdrPlusPendingRequest pendingRequest;
15570     {
15571         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15572         auto req = mHdrPlusPendingRequests.find(result->requestId);
15573         pendingRequest = req->second;
15574     }
15575 
15576     // Update the result metadata with the settings of the HDR+ still capture request because
15577     // the result metadata belongs to a ZSL buffer.
15578     CameraMetadata metadata;
15579     metadata = &resultMetadata;
15580     updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
15581     camera_metadata_t* updatedResultMetadata = metadata.release();
15582 
15583     uint32_t halSnapshotStreamId = 0;
15584     if (mPictureChannel != nullptr) {
15585         halSnapshotStreamId = mPictureChannel->getStreamID(mPictureChannel->getStreamTypeMask());
15586     }
15587 
15588     auto halMetadata = std::make_shared<metadata_buffer_t>();
15589     clear_metadata_buffer(halMetadata.get());
15590 
15591     // Convert updated result metadata to HAL metadata.
15592     status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
15593             halSnapshotStreamId, /*minFrameDuration*/0);
15594     if (res != 0) {
15595         ALOGE("%s: Translating metadata failed: %s (%d)", __FUNCTION__, strerror(-res), res);
15596     }
15597 
15598     for (auto &outputBuffer : result->outputBuffers) {
15599         uint32_t streamId = outputBuffer.streamId;
15600 
15601         // Find the framework output buffer in the pending request.
15602         auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
15603         if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
15604             ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
15605                     streamId);
15606             continue;
15607         }
15608 
15609         camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
15610 
15611         // Find the channel for the output buffer.
15612         QCamera3ProcessingChannel *channel =
15613                 (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
15614 
15615         // Find the output buffer def.
15616         auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
15617         if (outputBufferIter == pendingRequest.outputBuffers.end()) {
15618             ALOGE("%s: Cannot find output buffer", __FUNCTION__);
15619             continue;
15620         }
15621 
15622         std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
15623 
15624         // Check whether to dump the buffer.
15625         if (frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15626                 frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
15627             // If the stream format is YUV or jpeg, check if dumping HDR+ YUV output is enabled.
15628             char prop[PROPERTY_VALUE_MAX];
15629             property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
15630             bool dumpYuvOutput = atoi(prop);
15631 
15632             if (dumpYuvOutput) {
15633                 // Dump yuv buffer to a ppm file.
15634                 pbcamera::StreamConfiguration outputConfig;
15635                 status_t rc = fillPbStreamConfig(&outputConfig, streamId,
15636                         channel, /*stream index*/0);
15637                 if (rc == OK) {
15638                     char buf[FILENAME_MAX] = {};
15639                     snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
15640                             result->requestId, streamId,
15641                             outputConfig.image.width, outputConfig.image.height);
15642 
15643                     hdrplus_client_utils::writePpm(buf, outputConfig, outputBuffer);
15644                 } else {
15645                     LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: "
15646                             "%s (%d).", __FUNCTION__, strerror(-rc), rc);
15647                 }
15648             }
15649         }
15650 
15651         if (channel == mPictureChannel) {
15652             // Return the buffer to pic channel for encoding.
15653             mPictureChannel->returnYuvBufferAndEncode(outputBufferDef.get(),
15654                     frameworkOutputBuffer->buffer, result->requestId,
15655                     halMetadata);
15656         } else {
15657             // Return the buffer to camera framework.
15658             pthread_mutex_lock(&mMutex);
15659             handleBufferWithLock(frameworkOutputBuffer, result->requestId);
15660             channel->unregisterBuffer(outputBufferDef.get());
15661             pthread_mutex_unlock(&mMutex);
15662         }
15663     }
15664 
15665     // Send HDR+ metadata to framework.
15666     {
15667         pthread_mutex_lock(&mMutex);
15668 
15669         // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
15670         handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
15671         pthread_mutex_unlock(&mMutex);
15672     }
15673 
15674     // Remove the HDR+ pending request.
15675     {
15676         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15677         auto req = mHdrPlusPendingRequests.find(result->requestId);
15678         mHdrPlusPendingRequests.erase(req);
15679     }
15680 }
15681 
onFailedCaptureResult(pbcamera::CaptureResult * failedResult)15682 void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
15683 {
15684     if (failedResult == nullptr) {
15685         ALOGE("%s: Got an empty failed result.", __FUNCTION__);
15686         return;
15687     }
15688 
15689     ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
15690 
15691     // Find the pending HDR+ request.
15692     HdrPlusPendingRequest pendingRequest;
15693     {
15694         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15695         auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
15696         if (req == mHdrPlusPendingRequests.end()) {
15697             ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, failedResult->requestId);
15698             return;
15699         }
15700         pendingRequest = req->second;
15701     }
15702 
15703     for (auto &outputBuffer : failedResult->outputBuffers) {
15704         uint32_t streamId = outputBuffer.streamId;
15705 
15706         // Find the channel
15707         // Find the framework output buffer in the pending request.
15708         auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
15709         if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
15710             ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
15711                     streamId);
15712             continue;
15713         }
15714 
15715         camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
15716 
15717         // Find the channel for the output buffer.
15718         QCamera3ProcessingChannel *channel =
15719                 (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
15720 
15721         // Find the output buffer def.
15722         auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
15723         if (outputBufferIter == pendingRequest.outputBuffers.end()) {
15724             ALOGE("%s: Cannot find output buffer", __FUNCTION__);
15725             continue;
15726         }
15727 
15728         std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
15729 
15730         if (channel == mPictureChannel) {
15731             // Return the buffer to pic channel.
15732             mPictureChannel->returnYuvBuffer(outputBufferDef.get());
15733         } else {
15734             channel->unregisterBuffer(outputBufferDef.get());
15735         }
15736     }
15737 
15738     // Remove the HDR+ pending request.
15739     {
15740         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15741         auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
15742         mHdrPlusPendingRequests.erase(req);
15743     }
15744 
15745     pthread_mutex_lock(&mMutex);
15746 
15747     // Find the pending buffers.
15748     auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
15749     while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15750         if (pendingBuffers->frame_number == failedResult->requestId) {
15751             break;
15752         }
15753         pendingBuffers++;
15754     }
15755 
15756     // Send out buffer errors for the pending buffers.
15757     if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15758         std::vector<camera3_stream_buffer_t> streamBuffers;
15759         for (auto &buffer : pendingBuffers->mPendingBufferList) {
15760             // Prepare a stream buffer.
15761             camera3_stream_buffer_t streamBuffer = {};
15762             streamBuffer.stream = buffer.stream;
15763             streamBuffer.buffer = buffer.buffer;
15764             streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
15765             streamBuffer.acquire_fence = -1;
15766             streamBuffer.release_fence = -1;
15767 
15768             streamBuffers.push_back(streamBuffer);
15769 
15770             // Send out error buffer event.
15771             camera3_notify_msg_t notify_msg = {};
15772             notify_msg.type = CAMERA3_MSG_ERROR;
15773             notify_msg.message.error.frame_number = pendingBuffers->frame_number;
15774             notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
15775             notify_msg.message.error.error_stream = buffer.stream;
15776 
15777             orchestrateNotify(&notify_msg);
15778         }
15779 
15780         camera3_capture_result_t result = {};
15781         result.frame_number = pendingBuffers->frame_number;
15782         result.num_output_buffers = streamBuffers.size();
15783         result.output_buffers = &streamBuffers[0];
15784 
15785         // Send out result with buffer errors.
15786         orchestrateResult(&result);
15787 
15788         // Remove pending buffers.
15789         mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
15790     }
15791 
15792     // Remove pending request.
15793     auto halRequest = mPendingRequestsList.begin();
15794     while (halRequest != mPendingRequestsList.end()) {
15795         if (halRequest->frame_number == failedResult->requestId) {
15796             mPendingRequestsList.erase(halRequest);
15797             break;
15798         }
15799         halRequest++;
15800     }
15801 
15802     pthread_mutex_unlock(&mMutex);
15803 }
15804 
15805 
// Construct a dispatcher bound to the HWI instance whose orchestrateNotify()
// delivers the ordered shutter messages.
ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}
15808 
expectShutter(uint32_t frameNumber,bool isReprocess)15809 void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
15810 {
15811     std::lock_guard<std::mutex> lock(mLock);
15812 
15813     if (isReprocess) {
15814         mReprocessShutters.emplace(frameNumber, Shutter());
15815     } else {
15816         mShutters.emplace(frameNumber, Shutter());
15817     }
15818 }
15819 
markShutterReady(uint32_t frameNumber,uint64_t timestamp)15820 void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
15821 {
15822     std::lock_guard<std::mutex> lock(mLock);
15823 
15824     std::map<uint32_t, Shutter> *shutters = nullptr;
15825 
15826     // Find the shutter entry.
15827     auto shutter = mShutters.find(frameNumber);
15828     if (shutter == mShutters.end()) {
15829         shutter = mReprocessShutters.find(frameNumber);
15830         if (shutter == mReprocessShutters.end()) {
15831             // Shutter was already sent.
15832             return;
15833         }
15834         shutters = &mReprocessShutters;
15835     } else {
15836         shutters = &mShutters;
15837     }
15838 
15839     if (shutter->second.ready) {
15840         // If shutter is already ready, don't update timestamp again.
15841         return;
15842     }
15843 
15844     // Make this frame's shutter ready.
15845     shutter->second.ready = true;
15846     shutter->second.timestamp = timestamp;
15847 
15848     // Iterate throught the shutters and send out shuters until the one that's not ready yet.
15849     shutter = shutters->begin();
15850     while (shutter != shutters->end()) {
15851         if (!shutter->second.ready) {
15852             // If this shutter is not ready, the following shutters can't be sent.
15853             break;
15854         }
15855 
15856         camera3_notify_msg_t msg = {};
15857         msg.type = CAMERA3_MSG_SHUTTER;
15858         msg.message.shutter.frame_number = shutter->first;
15859         msg.message.shutter.timestamp = shutter->second.timestamp;
15860         mParent->orchestrateNotify(&msg);
15861 
15862         shutter = shutters->erase(shutter);
15863     }
15864 }
15865 
// Drop any tracked shutter state for a single frame, both normal and
// reprocess, without notifying the framework.
void ShutterDispatcher::clear(uint32_t frameNumber)
{
    std::lock_guard<std::mutex> lock(mLock);
    // Erase-by-key is a no-op if the frame isn't tracked in a given map.
    mShutters.erase(frameNumber);
    mReprocessShutters.erase(frameNumber);
}
15872 
clear()15873 void ShutterDispatcher::clear()
15874 {
15875     std::lock_guard<std::mutex> lock(mLock);
15876 
15877     // Log errors for stale shutters.
15878     for (auto &shutter : mShutters) {
15879         ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15880             __FUNCTION__, shutter.first, shutter.second.ready,
15881             shutter.second.timestamp);
15882     }
15883 
15884     // Log errors for stale reprocess shutters.
15885     for (auto &shutter : mReprocessShutters) {
15886         ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15887             __FUNCTION__, shutter.first, shutter.second.ready,
15888             shutter.second.timestamp);
15889     }
15890 
15891     mShutters.clear();
15892     mReprocessShutters.clear();
15893 }
15894 
// Construct a dispatcher bound to the HWI instance whose orchestrateResult()
// delivers the ordered per-stream buffer results.
OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}
15897 
configureStreams(camera3_stream_configuration_t * streamList)15898 status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15899 {
15900     std::lock_guard<std::mutex> lock(mLock);
15901     mStreamBuffers.clear();
15902     if (!streamList) {
15903         ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15904         return -EINVAL;
15905     }
15906 
15907     // Create a "frame-number -> buffer" map for each stream.
15908     for (uint32_t i = 0; i < streamList->num_streams; i++) {
15909         mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15910     }
15911 
15912     return OK;
15913 }
15914 
expectBuffer(uint32_t frameNumber,camera3_stream_t * stream)15915 status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15916 {
15917     std::lock_guard<std::mutex> lock(mLock);
15918 
15919     // Find the "frame-number -> buffer" map for the stream.
15920     auto buffers = mStreamBuffers.find(stream);
15921     if (buffers == mStreamBuffers.end()) {
15922         ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15923         return -EINVAL;
15924     }
15925 
15926     // Create an unready buffer for this frame number.
15927     buffers->second.emplace(frameNumber, Buffer());
15928     return OK;
15929 }
15930 
markBufferReady(uint32_t frameNumber,const camera3_stream_buffer_t & buffer)15931 void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15932         const camera3_stream_buffer_t &buffer)
15933 {
15934     std::lock_guard<std::mutex> lock(mLock);
15935 
15936     // Find the frame number -> buffer map for the stream.
15937     auto buffers = mStreamBuffers.find(buffer.stream);
15938     if (buffers == mStreamBuffers.end()) {
15939         ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15940         return;
15941     }
15942 
15943     // Find the unready buffer this frame number and mark it ready.
15944     auto pendingBuffer = buffers->second.find(frameNumber);
15945     if (pendingBuffer == buffers->second.end()) {
15946         ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15947         return;
15948     }
15949 
15950     pendingBuffer->second.ready = true;
15951     pendingBuffer->second.buffer = buffer;
15952 
15953     // Iterate through the buffers and send out buffers until the one that's not ready yet.
15954     pendingBuffer = buffers->second.begin();
15955     while (pendingBuffer != buffers->second.end()) {
15956         if (!pendingBuffer->second.ready) {
15957             // If this buffer is not ready, the following buffers can't be sent.
15958             break;
15959         }
15960 
15961         camera3_capture_result_t result = {};
15962         result.frame_number = pendingBuffer->first;
15963         result.num_output_buffers = 1;
15964         result.output_buffers = &pendingBuffer->second.buffer;
15965 
15966         // Send out result with buffer errors.
15967         mParent->orchestrateResult(&result);
15968 
15969         pendingBuffer = buffers->second.erase(pendingBuffer);
15970     }
15971 }
15972 
clear(bool clearConfiguredStreams)15973 void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15974 {
15975     std::lock_guard<std::mutex> lock(mLock);
15976 
15977     // Log errors for stale buffers.
15978     for (auto &buffers : mStreamBuffers) {
15979         for (auto &buffer : buffers.second) {
15980             ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15981                 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15982         }
15983         buffers.second.clear();
15984     }
15985 
15986     if (clearConfiguredStreams) {
15987         mStreamBuffers.clear();
15988     }
15989 }
15990 
15991 }; //end namespace qcamera
15992