• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 *     * Redistributions of source code must retain the above copyright
7 *       notice, this list of conditions and the following disclaimer.
8 *     * Redistributions in binary form must reproduce the above
9 *       copyright notice, this list of conditions and the following
10 *       disclaimer in the documentation and/or other materials provided
11 *       with the distribution.
12 *     * Neither the name of The Linux Foundation nor the names of its
13 *       contributors may be used to endorse or promote products derived
14 *       from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29 
30 #define LOG_TAG "QCamera3HWI"
31 //#define LOG_NDEBUG 0
32 
33 #define __STDC_LIMIT_MACROS
34 
35 // To remove
36 #include <cutils/properties.h>
37 
38 // System dependencies
39 #include <dlfcn.h>
40 #include <fcntl.h>
41 #include <stdio.h>
42 #include <stdlib.h>
43 #include <time.h>
44 #include <sync/sync.h>
45 #include "gralloc_priv.h"
46 
47 // Display dependencies
48 #include "qdMetaData.h"
49 
50 // Camera dependencies
51 #include "android/QCamera3External.h"
52 #include "util/QCameraFlash.h"
53 #include "QCamera3HWI.h"
54 #include "QCamera3VendorTags.h"
55 #include "QCameraTrace.h"
56 
57 extern "C" {
58 #include "mm_camera_dbg.h"
59 }
60 #include "cam_cond.h"
61 
62 using namespace android;
63 
64 namespace qcamera {
65 
66 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
67 
68 #define EMPTY_PIPELINE_DELAY 2
69 #define PARTIAL_RESULT_COUNT 2
70 #define FRAME_SKIP_DELAY     0
71 
72 #define MAX_VALUE_8BIT ((1<<8)-1)
73 #define MAX_VALUE_10BIT ((1<<10)-1)
74 #define MAX_VALUE_12BIT ((1<<12)-1)
75 
76 #define VIDEO_4K_WIDTH  3840
77 #define VIDEO_4K_HEIGHT 2160
78 
79 #define MAX_EIS_WIDTH 3840
80 #define MAX_EIS_HEIGHT 2160
81 
82 #define MAX_RAW_STREAMS        1
83 #define MAX_STALLING_STREAMS   1
84 #define MAX_PROCESSED_STREAMS  3
85 /* Batch mode is enabled only if FPS set is equal to or greater than this */
86 #define MIN_FPS_FOR_BATCH_MODE (120)
87 #define PREVIEW_FPS_FOR_HFR    (30)
88 #define DEFAULT_VIDEO_FPS      (30.0)
89 #define TEMPLATE_MAX_PREVIEW_FPS (30.0)
90 #define MAX_HFR_BATCH_SIZE     (8)
91 #define REGIONS_TUPLE_COUNT    5
92 #define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
93 // Set a threshold for detection of missing buffers //seconds
94 #define MISSING_REQUEST_BUF_TIMEOUT 3
95 #define FLUSH_TIMEOUT 3
96 #define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
97 
98 #define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
99                                               CAM_QCOM_FEATURE_CROP |\
100                                               CAM_QCOM_FEATURE_ROTATION |\
101                                               CAM_QCOM_FEATURE_SHARPNESS |\
102                                               CAM_QCOM_FEATURE_SCALE |\
103                                               CAM_QCOM_FEATURE_CAC |\
104                                               CAM_QCOM_FEATURE_CDS )
105 /* Per configuration size for static metadata length*/
106 #define PER_CONFIGURATION_SIZE_3 (3)
107 
108 #define TIMEOUT_NEVER -1
109 
// Per-sensor capability table; entries are written elsewhere (e.g. during
// camera probe/open) and read throughout this file via gCamCapability[cameraId].
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static metadata, one slot per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Protects the globals shared across camera sessions (defined elsewhere).
extern pthread_mutex_t gCamLock;
// HAL log verbosity; volatile because it can be changed at runtime (getLogLevel()).
volatile uint32_t gCamHal3LogLevel = 1;
// Count of currently open camera sessions (defined elsewhere; guarded by gCamLock).
extern uint8_t gNumCameraSessions;
115 
// Maps the "persist.camera.CDS" style property strings to CDS (Chroma
// Down-Sampling) modes.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
121 
// Android control.effectMode <-> HAL effect mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
135 
// Android control.awbMode <-> HAL white-balance mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
149 
// Android control.sceneMode <-> HAL scene mode translation table.
// Note: STEADYPHOTO maps to the HAL's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
170 
// Android control.afMode <-> HAL focus mode translation table.
// AF_MODE_OFF appears twice on purpose: both CAM_FOCUS_MODE_OFF and
// CAM_FOCUS_MODE_FIXED translate back to the single Android OFF mode, and
// lookup takes the first match found.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
182 
// Android colorCorrection.aberrationMode <-> HAL CAC mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};
193 
// Android control.aeAntibandingMode <-> HAL antibanding mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
202 
// Maps Android control.aeMode to the HAL flash mode implied by that AE mode
// (AE OFF/ON -> flash off; auto-flash and red-eye -> auto; always-flash -> on).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
212 
// Android flash.mode <-> HAL flash mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
220 
// Android statistics.faceDetectMode <-> HAL face-detect mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
228 
// Android lens.info.focusDistanceCalibration <-> HAL calibration-quality map.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};
239 
// Android lens.state <-> HAL AF lens state translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
246 
// Supported JPEG thumbnail sizes as flat (width, height) pairs; the leading
// {0, 0} entry means "no thumbnail", as required by the camera metadata spec.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};
255 
// Android sensor.testPatternMode <-> HAL test pattern translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};
266 
/* Android sensor.referenceIlluminant1 <-> HAL AWB illuminant map.
 * Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
292 
// Requested video frame rate (fps) <-> HAL high-frame-rate mode table.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
304 
// HAL3 device ops table handed to the framework via mCameraDevice.ops in the
// constructor. register_stream_buffers and get_metadata_vendor_tag_ops are
// intentionally NULL (unused by this HAL version).
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
316 
// Backend session id per camera, filled by get_session_id() in openCamera()
// and used when linking dual cameras; initialised to a sentinel default value.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
319 
320 /*===========================================================================
321  * FUNCTION   : QCamera3HardwareInterface
322  *
323  * DESCRIPTION: constructor of QCamera3HardwareInterface
324  *
325  * PARAMETERS :
326  *   @cameraId  : camera ID
327  *
328  * RETURN     : none
329  *==========================================================================*/
QCamera3HardwareInterface(uint32_t cameraId,const camera_module_callbacks_t * callbacks)330 QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
331         const camera_module_callbacks_t *callbacks)
332     : mCameraId(cameraId),
333       mCameraHandle(NULL),
334       mCameraInitialized(false),
335       mCallbackOps(NULL),
336       mMetadataChannel(NULL),
337       mPictureChannel(NULL),
338       mRawChannel(NULL),
339       mSupportChannel(NULL),
340       mAnalysisChannel(NULL),
341       mRawDumpChannel(NULL),
342       mDummyBatchChannel(NULL),
343       m_perfLock(),
344       mCommon(),
345       mChannelHandle(0),
346       mFirstConfiguration(true),
347       mFlush(false),
348       mFlushPerf(false),
349       mParamHeap(NULL),
350       mParameters(NULL),
351       mPrevParameters(NULL),
352       m_bIsVideo(false),
353       m_bIs4KVideo(false),
354       m_bEisSupportedSize(false),
355       m_bEisEnable(false),
356       m_MobicatMask(0),
357       mMinProcessedFrameDuration(0),
358       mMinJpegFrameDuration(0),
359       mMinRawFrameDuration(0),
360       mMetaFrameCount(0U),
361       mUpdateDebugLevel(false),
362       mCallbacks(callbacks),
363       mCaptureIntent(0),
364       mCacMode(0),
365       mHybridAeEnable(0),
366       /* DevCamDebug metadata internal m control*/
367       mDevCamDebugMetaEnable(0),
368       /* DevCamDebug metadata end */
369       mBatchSize(0),
370       mToBeQueuedVidBufs(0),
371       mHFRVideoFps(DEFAULT_VIDEO_FPS),
372       mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
373       mFirstFrameNumberInBatch(0),
374       mNeedSensorRestart(false),
375       mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
376       mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
377       mLdafCalibExist(false),
378       mPowerHintEnabled(false),
379       mLastCustIntentFrmNum(-1),
380       mState(CLOSED),
381       mIsDeviceLinked(false),
382       mIsMainCamera(true),
383       mLinkedCameraId(0),
384       m_pRelCamSyncHeap(NULL),
385       m_pRelCamSyncBuf(NULL)
386 {
387     getLogLevel();
388     m_perfLock.lock_init();
389     mCommon.init(gCamCapability[cameraId]);
390     mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
391     mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
392     mCameraDevice.common.close = close_camera_device;
393     mCameraDevice.ops = &mCameraOps;
394     mCameraDevice.priv = this;
395     gCamCapability[cameraId]->version = CAM_HAL_V3;
396     // TODO: hardcode for now until mctl add support for min_num_pp_bufs
397     //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
398     gCamCapability[cameraId]->min_num_pp_bufs = 3;
399 
400     PTHREAD_COND_INIT(&mBuffersCond);
401 
402     PTHREAD_COND_INIT(&mRequestCond);
403     mPendingLiveRequest = 0;
404     mCurrentRequestId = -1;
405     pthread_mutex_init(&mMutex, NULL);
406 
407     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
408         mDefaultMetadata[i] = NULL;
409 
410     // Getting system props of different kinds
411     char prop[PROPERTY_VALUE_MAX];
412     memset(prop, 0, sizeof(prop));
413     property_get("persist.camera.raw.dump", prop, "0");
414     mEnableRawDump = atoi(prop);
415     if (mEnableRawDump)
416         LOGD("Raw dump from Camera HAL enabled");
417 
418     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
419     memset(mLdafCalib, 0, sizeof(mLdafCalib));
420 
421     memset(prop, 0, sizeof(prop));
422     property_get("persist.camera.tnr.preview", prop, "0");
423     m_bTnrPreview = (uint8_t)atoi(prop);
424 
425     memset(prop, 0, sizeof(prop));
426     property_get("persist.camera.tnr.video", prop, "0");
427     m_bTnrVideo = (uint8_t)atoi(prop);
428 
429     memset(prop, 0, sizeof(prop));
430     property_get("persist.camera.avtimer.debug", prop, "0");
431     m_debug_avtimer = (uint8_t)atoi(prop);
432 
433     //Load and read GPU library.
434     lib_surface_utils = NULL;
435     LINK_get_surface_pixel_alignment = NULL;
436     mSurfaceStridePadding = CAM_PAD_TO_32;
437     lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
438     if (lib_surface_utils) {
439         *(void **)&LINK_get_surface_pixel_alignment =
440                 dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
441          if (LINK_get_surface_pixel_alignment) {
442              mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
443          }
444          dlclose(lib_surface_utils);
445     }
446 
447     m60HzZone = is60HzZone();
448 }
449 
450 /*===========================================================================
451  * FUNCTION   : ~QCamera3HardwareInterface
452  *
453  * DESCRIPTION: destructor of QCamera3HardwareInterface
454  *
455  * PARAMETERS : none
456  *
457  * RETURN     : none
458  *==========================================================================*/
~QCamera3HardwareInterface()459 QCamera3HardwareInterface::~QCamera3HardwareInterface()
460 {
461     LOGD("E");
462 
463     /* Turn off current power hint before acquiring perfLock in case they
464      * conflict with each other */
465     disablePowerHint();
466 
467     m_perfLock.lock_acq();
468 
469     /* We need to stop all streams before deleting any stream */
470     if (mRawDumpChannel) {
471         mRawDumpChannel->stop();
472     }
473 
474     // NOTE: 'camera3_stream_t *' objects are already freed at
475     //        this stage by the framework
476     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
477         it != mStreamInfo.end(); it++) {
478         QCamera3ProcessingChannel *channel = (*it)->channel;
479         if (channel) {
480             channel->stop();
481         }
482     }
483     if (mSupportChannel)
484         mSupportChannel->stop();
485 
486     if (mAnalysisChannel) {
487         mAnalysisChannel->stop();
488     }
489     if (mMetadataChannel) {
490         mMetadataChannel->stop();
491     }
492     if (mChannelHandle) {
493         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
494                 mChannelHandle);
495         LOGD("stopping channel %d", mChannelHandle);
496     }
497 
498     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
499         it != mStreamInfo.end(); it++) {
500         QCamera3ProcessingChannel *channel = (*it)->channel;
501         if (channel)
502             delete channel;
503         free (*it);
504     }
505     if (mSupportChannel) {
506         delete mSupportChannel;
507         mSupportChannel = NULL;
508     }
509 
510     if (mAnalysisChannel) {
511         delete mAnalysisChannel;
512         mAnalysisChannel = NULL;
513     }
514     if (mRawDumpChannel) {
515         delete mRawDumpChannel;
516         mRawDumpChannel = NULL;
517     }
518     if (mDummyBatchChannel) {
519         delete mDummyBatchChannel;
520         mDummyBatchChannel = NULL;
521     }
522     mPictureChannel = NULL;
523 
524     if (mMetadataChannel) {
525         delete mMetadataChannel;
526         mMetadataChannel = NULL;
527     }
528 
529     /* Clean up all channels */
530     if (mCameraInitialized) {
531         if(!mFirstConfiguration){
532             //send the last unconfigure
533             cam_stream_size_info_t stream_config_info;
534             memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
535             stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
536             stream_config_info.buffer_info.max_buffers =
537                     m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
538             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
539                     stream_config_info);
540             int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
541             if (rc < 0) {
542                 LOGE("set_parms failed for unconfigure");
543             }
544         }
545         deinitParameters();
546     }
547 
548     if (mChannelHandle) {
549         mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
550                 mChannelHandle);
551         LOGH("deleting channel %d", mChannelHandle);
552         mChannelHandle = 0;
553     }
554 
555     if (mState != CLOSED)
556         closeCamera();
557 
558     for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
559         req.mPendingBufferList.clear();
560     }
561     mPendingBuffersMap.mPendingBuffersInRequest.clear();
562     mPendingReprocessResultList.clear();
563     for (pendingRequestIterator i = mPendingRequestsList.begin();
564             i != mPendingRequestsList.end();) {
565         i = erasePendingRequest(i);
566     }
567     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
568         if (mDefaultMetadata[i])
569             free_camera_metadata(mDefaultMetadata[i]);
570 
571     m_perfLock.lock_rel();
572     m_perfLock.lock_deinit();
573 
574     pthread_cond_destroy(&mRequestCond);
575 
576     pthread_cond_destroy(&mBuffersCond);
577 
578     pthread_mutex_destroy(&mMutex);
579     LOGD("X");
580 }
581 
582 /*===========================================================================
583  * FUNCTION   : erasePendingRequest
584  *
585  * DESCRIPTION: function to erase a desired pending request after freeing any
586  *              allocated memory
587  *
588  * PARAMETERS :
589  *   @i       : iterator pointing to pending request to be erased
590  *
591  * RETURN     : iterator pointing to the next request
592  *==========================================================================*/
593 QCamera3HardwareInterface::pendingRequestIterator
erasePendingRequest(pendingRequestIterator i)594         QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
595 {
596     if (i->input_buffer != NULL) {
597         free(i->input_buffer);
598         i->input_buffer = NULL;
599     }
600     if (i->settings != NULL)
601         free_camera_metadata((camera_metadata_t*)i->settings);
602     return mPendingRequestsList.erase(i);
603 }
604 
605 /*===========================================================================
606  * FUNCTION   : camEvtHandle
607  *
608  * DESCRIPTION: Function registered to mm-camera-interface to handle events
609  *
610  * PARAMETERS :
611  *   @camera_handle : interface layer camera handle
612  *   @evt           : ptr to event
613  *   @user_data     : user data ptr
614  *
615  * RETURN     : none
616  *==========================================================================*/
camEvtHandle(uint32_t,mm_camera_event_t * evt,void * user_data)617 void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
618                                           mm_camera_event_t *evt,
619                                           void *user_data)
620 {
621     QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
622     if (obj && evt) {
623         switch(evt->server_event_type) {
624             case CAM_EVENT_TYPE_DAEMON_DIED:
625                 pthread_mutex_lock(&obj->mMutex);
626                 obj->mState = ERROR;
627                 pthread_mutex_unlock(&obj->mMutex);
628                 LOGE("Fatal, camera daemon died");
629                 break;
630 
631             case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
632                 LOGD("HAL got request pull from Daemon");
633                 pthread_mutex_lock(&obj->mMutex);
634                 obj->mWokenUpByDaemon = true;
635                 obj->unblockRequestIfNecessary();
636                 pthread_mutex_unlock(&obj->mMutex);
637                 break;
638 
639             default:
640                 LOGW("Warning: Unhandled event %d",
641                         evt->server_event_type);
642                 break;
643         }
644     } else {
645         LOGE("NULL user_data/evt");
646     }
647 }
648 
649 /*===========================================================================
650  * FUNCTION   : openCamera
651  *
652  * DESCRIPTION: open camera
653  *
654  * PARAMETERS :
655  *   @hw_device  : double ptr for camera device struct
656  *
657  * RETURN     : int32_t type of status
658  *              NO_ERROR  -- success
659  *              none-zero failure code
660  *==========================================================================*/
openCamera(struct hw_device_t ** hw_device)661 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
662 {
663     int rc = 0;
664     if (mState != CLOSED) {
665         *hw_device = NULL;
666         return PERMISSION_DENIED;
667     }
668 
669     m_perfLock.lock_acq();
670     LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
671              mCameraId);
672 
673     rc = openCamera();
674     if (rc == 0) {
675         *hw_device = &mCameraDevice.common;
676     } else
677         *hw_device = NULL;
678 
679     m_perfLock.lock_rel();
680     LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
681              mCameraId, rc);
682 
683     if (rc == NO_ERROR) {
684         mState = OPENED;
685     }
686     return rc;
687 }
688 
689 /*===========================================================================
690  * FUNCTION   : openCamera
691  *
692  * DESCRIPTION: open camera
693  *
694  * PARAMETERS : none
695  *
696  * RETURN     : int32_t type of status
697  *              NO_ERROR  -- success
698  *              none-zero failure code
699  *==========================================================================*/
openCamera()700 int QCamera3HardwareInterface::openCamera()
701 {
702     int rc = 0;
703     char value[PROPERTY_VALUE_MAX];
704 
705     KPI_ATRACE_CALL();
706     if (mCameraHandle) {
707         LOGE("Failure: Camera already opened");
708         return ALREADY_EXISTS;
709     }
710 
711     rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
712     if (rc < 0) {
713         LOGE("Failed to reserve flash for camera id: %d",
714                 mCameraId);
715         return UNKNOWN_ERROR;
716     }
717 
718     rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
719     if (rc) {
720         LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
721         return rc;
722     }
723 
724     if (!mCameraHandle) {
725         LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
726         return -ENODEV;
727     }
728 
729     rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
730             camEvtHandle, (void *)this);
731 
732     if (rc < 0) {
733         LOGE("Error, failed to register event callback");
734         /* Not closing camera here since it is already handled in destructor */
735         return FAILED_TRANSACTION;
736     }
737 
738     mExifParams.debug_params =
739             (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
740     if (mExifParams.debug_params) {
741         memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
742     } else {
743         LOGE("Out of Memory. Allocation failed for 3A debug exif params");
744         return NO_MEMORY;
745     }
746     mFirstConfiguration = true;
747 
748     //Notify display HAL that a camera session is active.
749     //But avoid calling the same during bootup because camera service might open/close
750     //cameras at boot time during its initialization and display service will also internally
751     //wait for camera service to initialize first while calling this display API, resulting in a
752     //deadlock situation. Since boot time camera open/close calls are made only to fetch
753     //capabilities, no need of this display bw optimization.
754     //Use "service.bootanim.exit" property to know boot status.
755     property_get("service.bootanim.exit", value, "0");
756     if (atoi(value) == 1) {
757         pthread_mutex_lock(&gCamLock);
758         if (gNumCameraSessions++ == 0) {
759             setCameraLaunchStatus(true);
760         }
761         pthread_mutex_unlock(&gCamLock);
762     }
763 
764     //fill the session id needed while linking dual cam
765     pthread_mutex_lock(&gCamLock);
766     rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
767         &sessionId[mCameraId]);
768     pthread_mutex_unlock(&gCamLock);
769 
770     if (rc < 0) {
771         LOGE("Error, failed to get sessiion id");
772         return UNKNOWN_ERROR;
773     } else {
774         //Allocate related cam sync buffer
775         //this is needed for the payload that goes along with bundling cmd for related
776         //camera use cases
777         m_pRelCamSyncHeap = new QCamera3HeapMemory(1);
778         rc = m_pRelCamSyncHeap->allocate(sizeof(cam_sync_related_sensors_event_info_t));
779         if(rc != OK) {
780             rc = NO_MEMORY;
781             LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
782             return NO_MEMORY;
783         }
784 
785         //Map memory for related cam sync buffer
786         rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
787                 CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF,
788                 m_pRelCamSyncHeap->getFd(0),
789                 sizeof(cam_sync_related_sensors_event_info_t),
790                 m_pRelCamSyncHeap->getPtr(0));
791         if(rc < 0) {
792             LOGE("Dualcam: failed to map Related cam sync buffer");
793             rc = FAILED_TRANSACTION;
794             return NO_MEMORY;
795         }
796         m_pRelCamSyncBuf =
797                 (cam_sync_related_sensors_event_info_t*) DATA_PTR(m_pRelCamSyncHeap,0);
798     }
799 
800     LOGH("mCameraId=%d",mCameraId);
801 
802     return NO_ERROR;
803 }
804 
805 /*===========================================================================
806  * FUNCTION   : closeCamera
807  *
808  * DESCRIPTION: close camera
809  *
810  * PARAMETERS : none
811  *
812  * RETURN     : int32_t type of status
813  *              NO_ERROR  -- success
814  *              none-zero failure code
815  *==========================================================================*/
closeCamera()816 int QCamera3HardwareInterface::closeCamera()
817 {
818     KPI_ATRACE_CALL();
819     int rc = NO_ERROR;
820     char value[PROPERTY_VALUE_MAX];
821 
822     LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
823              mCameraId);
824     rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
825     mCameraHandle = NULL;
826 
827     //reset session id to some invalid id
828     pthread_mutex_lock(&gCamLock);
829     sessionId[mCameraId] = 0xDEADBEEF;
830     pthread_mutex_unlock(&gCamLock);
831 
832     //Notify display HAL that there is no active camera session
833     //but avoid calling the same during bootup. Refer to openCamera
834     //for more details.
835     property_get("service.bootanim.exit", value, "0");
836     if (atoi(value) == 1) {
837         pthread_mutex_lock(&gCamLock);
838         if (--gNumCameraSessions == 0) {
839             setCameraLaunchStatus(false);
840         }
841         pthread_mutex_unlock(&gCamLock);
842     }
843 
844     if (NULL != m_pRelCamSyncHeap) {
845         m_pRelCamSyncHeap->deallocate();
846         delete m_pRelCamSyncHeap;
847         m_pRelCamSyncHeap = NULL;
848         m_pRelCamSyncBuf = NULL;
849     }
850 
851     if (mExifParams.debug_params) {
852         free(mExifParams.debug_params);
853         mExifParams.debug_params = NULL;
854     }
855     if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
856         LOGW("Failed to release flash for camera id: %d",
857                 mCameraId);
858     }
859     mState = CLOSED;
860     LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
861          mCameraId, rc);
862     return rc;
863 }
864 
865 /*===========================================================================
866  * FUNCTION   : initialize
867  *
868  * DESCRIPTION: Initialize frameworks callback functions
869  *
870  * PARAMETERS :
871  *   @callback_ops : callback function to frameworks
872  *
873  * RETURN     :
874  *
875  *==========================================================================*/
initialize(const struct camera3_callback_ops * callback_ops)876 int QCamera3HardwareInterface::initialize(
877         const struct camera3_callback_ops *callback_ops)
878 {
879     ATRACE_CALL();
880     int rc;
881 
882     LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
883     pthread_mutex_lock(&mMutex);
884 
885     // Validate current state
886     switch (mState) {
887         case OPENED:
888             /* valid state */
889             break;
890         default:
891             LOGE("Invalid state %d", mState);
892             rc = -ENODEV;
893             goto err1;
894     }
895 
896     rc = initParameters();
897     if (rc < 0) {
898         LOGE("initParamters failed %d", rc);
899         goto err1;
900     }
901     mCallbackOps = callback_ops;
902 
903     mChannelHandle = mCameraHandle->ops->add_channel(
904             mCameraHandle->camera_handle, NULL, NULL, this);
905     if (mChannelHandle == 0) {
906         LOGE("add_channel failed");
907         rc = -ENOMEM;
908         pthread_mutex_unlock(&mMutex);
909         return rc;
910     }
911 
912     pthread_mutex_unlock(&mMutex);
913     mCameraInitialized = true;
914     mState = INITIALIZED;
915     LOGI("X");
916     return 0;
917 
918 err1:
919     pthread_mutex_unlock(&mMutex);
920     return rc;
921 }
922 
923 /*===========================================================================
924  * FUNCTION   : validateStreamDimensions
925  *
926  * DESCRIPTION: Check if the configuration requested are those advertised
927  *
928  * PARAMETERS :
929  *   @stream_list : streams to be configured
930  *
931  * RETURN     :
932  *
933  *==========================================================================*/
validateStreamDimensions(camera3_stream_configuration_t * streamList)934 int QCamera3HardwareInterface::validateStreamDimensions(
935         camera3_stream_configuration_t *streamList)
936 {
937     int rc = NO_ERROR;
938     size_t count = 0;
939 
940     camera3_stream_t *inputStream = NULL;
941     /*
942     * Loop through all streams to find input stream if it exists*
943     */
944     for (size_t i = 0; i< streamList->num_streams; i++) {
945         if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
946             if (inputStream != NULL) {
947                 LOGE("Error, Multiple input streams requested");
948                 return -EINVAL;
949             }
950             inputStream = streamList->streams[i];
951         }
952     }
953     /*
954     * Loop through all streams requested in configuration
955     * Check if unsupported sizes have been requested on any of them
956     */
957     for (size_t j = 0; j < streamList->num_streams; j++) {
958         bool sizeFound = false;
959         camera3_stream_t *newStream = streamList->streams[j];
960 
961         uint32_t rotatedHeight = newStream->height;
962         uint32_t rotatedWidth = newStream->width;
963         if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
964                 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
965             rotatedHeight = newStream->width;
966             rotatedWidth = newStream->height;
967         }
968 
969         /*
970         * Sizes are different for each type of stream format check against
971         * appropriate table.
972         */
973         switch (newStream->format) {
974         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
975         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
976         case HAL_PIXEL_FORMAT_RAW10:
977             count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
978             for (size_t i = 0; i < count; i++) {
979                 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
980                         (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
981                     sizeFound = true;
982                     break;
983                 }
984             }
985             break;
986         case HAL_PIXEL_FORMAT_BLOB:
987             count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
988             /* Verify set size against generated sizes table */
989             for (size_t i = 0; i < count; i++) {
990                 if (((int32_t)rotatedWidth ==
991                         gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
992                         ((int32_t)rotatedHeight ==
993                         gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
994                     sizeFound = true;
995                     break;
996                 }
997             }
998             break;
999         case HAL_PIXEL_FORMAT_YCbCr_420_888:
1000         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1001         default:
1002             if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1003                     || newStream->stream_type == CAMERA3_STREAM_INPUT
1004                     || IS_USAGE_ZSL(newStream->usage)) {
1005                 if (((int32_t)rotatedWidth ==
1006                                 gCamCapability[mCameraId]->active_array_size.width) &&
1007                                 ((int32_t)rotatedHeight ==
1008                                 gCamCapability[mCameraId]->active_array_size.height)) {
1009                     sizeFound = true;
1010                     break;
1011                 }
1012                 /* We could potentially break here to enforce ZSL stream
1013                  * set from frameworks always is full active array size
1014                  * but it is not clear from the spc if framework will always
1015                  * follow that, also we have logic to override to full array
1016                  * size, so keeping the logic lenient at the moment
1017                  */
1018             }
1019             count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1020                     MAX_SIZES_CNT);
1021             for (size_t i = 0; i < count; i++) {
1022                 if (((int32_t)rotatedWidth ==
1023                             gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1024                             ((int32_t)rotatedHeight ==
1025                             gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1026                     sizeFound = true;
1027                     break;
1028                 }
1029             }
1030             break;
1031         } /* End of switch(newStream->format) */
1032 
1033         /* We error out even if a single stream has unsupported size set */
1034         if (!sizeFound) {
1035             LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1036                     rotatedWidth, rotatedHeight, newStream->format,
1037                     gCamCapability[mCameraId]->active_array_size.width,
1038                     gCamCapability[mCameraId]->active_array_size.height);
1039             rc = -EINVAL;
1040             break;
1041         }
1042     } /* End of for each stream */
1043     return rc;
1044 }
1045 
1046 /*==============================================================================
1047  * FUNCTION   : isSupportChannelNeeded
1048  *
1049  * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1050  *
1051  * PARAMETERS :
1052  *   @stream_list : streams to be configured
1053  *   @stream_config_info : the config info for streams to be configured
1054  *
1055  * RETURN     : Boolen true/false decision
1056  *
1057  *==========================================================================*/
isSupportChannelNeeded(camera3_stream_configuration_t * streamList,cam_stream_size_info_t stream_config_info)1058 bool QCamera3HardwareInterface::isSupportChannelNeeded(
1059         camera3_stream_configuration_t *streamList,
1060         cam_stream_size_info_t stream_config_info)
1061 {
1062     uint32_t i;
1063     bool pprocRequested = false;
1064     /* Check for conditions where PProc pipeline does not have any streams*/
1065     for (i = 0; i < stream_config_info.num_streams; i++) {
1066         if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1067                 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1068             pprocRequested = true;
1069             break;
1070         }
1071     }
1072 
1073     if (pprocRequested == false )
1074         return true;
1075 
1076     /* Dummy stream needed if only raw or jpeg streams present */
1077     for (i = 0; i < streamList->num_streams; i++) {
1078         switch(streamList->streams[i]->format) {
1079             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1080             case HAL_PIXEL_FORMAT_RAW10:
1081             case HAL_PIXEL_FORMAT_RAW16:
1082             case HAL_PIXEL_FORMAT_BLOB:
1083                 break;
1084             default:
1085                 return false;
1086         }
1087     }
1088     return true;
1089 }
1090 
1091 /*==============================================================================
1092  * FUNCTION   : getSensorOutputSize
1093  *
1094  * DESCRIPTION: Get sensor output size based on current stream configuratoin
1095  *
1096  * PARAMETERS :
1097  *   @sensor_dim : sensor output dimension (output)
1098  *
1099  * RETURN     : int32_t type of status
1100  *              NO_ERROR  -- success
1101  *              none-zero failure code
1102  *
1103  *==========================================================================*/
getSensorOutputSize(cam_dimension_t & sensor_dim)1104 int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
1105 {
1106     int32_t rc = NO_ERROR;
1107 
1108     cam_dimension_t max_dim = {0, 0};
1109     for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1110         if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1111             max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1112         if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1113             max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1114     }
1115 
1116     clear_metadata_buffer(mParameters);
1117 
1118     rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1119             max_dim);
1120     if (rc != NO_ERROR) {
1121         LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1122         return rc;
1123     }
1124 
1125     rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1126     if (rc != NO_ERROR) {
1127         LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1128         return rc;
1129     }
1130 
1131     clear_metadata_buffer(mParameters);
1132     ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1133 
1134     rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1135             mParameters);
1136     if (rc != NO_ERROR) {
1137         LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
1138         return rc;
1139     }
1140 
1141     READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1142     LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
1143 
1144     return rc;
1145 }
1146 
1147 /*==============================================================================
1148  * FUNCTION   : enablePowerHint
1149  *
1150  * DESCRIPTION: enable single powerhint for preview and different video modes.
1151  *
1152  * PARAMETERS :
1153  *
1154  * RETURN     : NULL
1155  *
1156  *==========================================================================*/
enablePowerHint()1157 void QCamera3HardwareInterface::enablePowerHint()
1158 {
1159     if (!mPowerHintEnabled) {
1160         m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
1161         mPowerHintEnabled = true;
1162     }
1163 }
1164 
1165 /*==============================================================================
1166  * FUNCTION   : disablePowerHint
1167  *
1168  * DESCRIPTION: disable current powerhint.
1169  *
1170  * PARAMETERS :
1171  *
1172  * RETURN     : NULL
1173  *
1174  *==========================================================================*/
disablePowerHint()1175 void QCamera3HardwareInterface::disablePowerHint()
1176 {
1177     if (mPowerHintEnabled) {
1178         m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
1179         mPowerHintEnabled = false;
1180     }
1181 }
1182 
1183 /*==============================================================================
1184  * FUNCTION   : addToPPFeatureMask
1185  *
1186  * DESCRIPTION: add additional features to pp feature mask based on
1187  *              stream type and usecase
1188  *
1189  * PARAMETERS :
1190  *   @stream_format : stream type for feature mask
1191  *   @stream_idx : stream idx within postprocess_mask list to change
1192  *
1193  * RETURN     : NULL
1194  *
1195  *==========================================================================*/
addToPPFeatureMask(int stream_format,uint32_t stream_idx)1196 void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1197         uint32_t stream_idx)
1198 {
1199     char feature_mask_value[PROPERTY_VALUE_MAX];
1200     cam_feature_mask_t feature_mask;
1201     int args_converted;
1202     int property_len;
1203 
1204     /* Get feature mask from property */
1205     property_len = property_get("persist.camera.hal3.feature",
1206             feature_mask_value, "0");
1207     if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1208             (feature_mask_value[1] == 'x')) {
1209         args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1210     } else {
1211         args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1212     }
1213     if (1 != args_converted) {
1214         feature_mask = 0;
1215         LOGE("Wrong feature mask %s", feature_mask_value);
1216         return;
1217     }
1218 
1219     switch (stream_format) {
1220     case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1221         /* Add LLVD to pp feature mask only if video hint is enabled */
1222         if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1223             mStreamConfigInfo.postprocess_mask[stream_idx]
1224                     |= CAM_QTI_FEATURE_SW_TNR;
1225             LOGH("Added SW TNR to pp feature mask");
1226         } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1227             mStreamConfigInfo.postprocess_mask[stream_idx]
1228                     |= CAM_QCOM_FEATURE_LLVD;
1229             LOGH("Added LLVD SeeMore to pp feature mask");
1230         }
1231         break;
1232     }
1233     default:
1234         break;
1235     }
1236     LOGD("PP feature mask %llx",
1237             mStreamConfigInfo.postprocess_mask[stream_idx]);
1238 }
1239 
1240 /*==============================================================================
1241  * FUNCTION   : updateFpsInPreviewBuffer
1242  *
1243  * DESCRIPTION: update FPS information in preview buffer.
1244  *
1245  * PARAMETERS :
1246  *   @metadata    : pointer to metadata buffer
1247  *   @frame_number: frame_number to look for in pending buffer list
1248  *
1249  * RETURN     : None
1250  *
1251  *==========================================================================*/
updateFpsInPreviewBuffer(metadata_buffer_t * metadata,uint32_t frame_number)1252 void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1253         uint32_t frame_number)
1254 {
1255     // Mark all pending buffers for this particular request
1256     // with corresponding framerate information
1257     for (List<PendingBuffersInRequest>::iterator req =
1258             mPendingBuffersMap.mPendingBuffersInRequest.begin();
1259             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1260         for(List<PendingBufferInfo>::iterator j =
1261                 req->mPendingBufferList.begin();
1262                 j != req->mPendingBufferList.end(); j++) {
1263             QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1264             if ((req->frame_number == frame_number) &&
1265                 (channel->getStreamTypeMask() &
1266                 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1267                 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1268                     CAM_INTF_PARM_FPS_RANGE, metadata) {
1269                     int32_t cameraFps = float_range->max_fps;
1270                     struct private_handle_t *priv_handle =
1271                         (struct private_handle_t *)(*(j->buffer));
1272                     setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1273                 }
1274             }
1275         }
1276     }
1277 }
1278 
1279 /*==============================================================================
1280  * FUNCTION   : updateTimeStampInPendingBuffers
1281  *
1282  * DESCRIPTION: update timestamp in display metadata for all pending buffers
1283  *              of a frame number
1284  *
1285  * PARAMETERS :
1286  *   @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1287  *   @timestamp   : timestamp to be set
1288  *
1289  * RETURN     : None
1290  *
1291  *==========================================================================*/
updateTimeStampInPendingBuffers(uint32_t frameNumber,nsecs_t timestamp)1292 void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1293         uint32_t frameNumber, nsecs_t timestamp)
1294 {
1295     for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1296             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1297         if (req->frame_number != frameNumber)
1298             continue;
1299 
1300         for (auto k = req->mPendingBufferList.begin();
1301                 k != req->mPendingBufferList.end(); k++ ) {
1302             struct private_handle_t *priv_handle =
1303                     (struct private_handle_t *) (*(k->buffer));
1304             setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1305         }
1306     }
1307     return;
1308 }
1309 
1310 /*===========================================================================
1311  * FUNCTION   : configureStreams
1312  *
1313  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1314  *              and output streams.
1315  *
1316  * PARAMETERS :
1317  *   @stream_list : streams to be configured
1318  *
1319  * RETURN     :
1320  *
1321  *==========================================================================*/
configureStreams(camera3_stream_configuration_t * streamList)1322 int QCamera3HardwareInterface::configureStreams(
1323         camera3_stream_configuration_t *streamList)
1324 {
1325     ATRACE_CALL();
1326     int rc = 0;
1327 
1328     // Acquire perfLock before configure streams
1329     m_perfLock.lock_acq();
1330     rc = configureStreamsPerfLocked(streamList);
1331     m_perfLock.lock_rel();
1332 
1333     return rc;
1334 }
1335 
1336 /*===========================================================================
1337  * FUNCTION   : configureStreamsPerfLocked
1338  *
1339  * DESCRIPTION: configureStreams while perfLock is held.
1340  *
1341  * PARAMETERS :
1342  *   @stream_list : streams to be configured
1343  *
1344  * RETURN     : int32_t type of status
1345  *              NO_ERROR  -- success
1346  *              none-zero failure code
1347  *==========================================================================*/
configureStreamsPerfLocked(camera3_stream_configuration_t * streamList)1348 int QCamera3HardwareInterface::configureStreamsPerfLocked(
1349         camera3_stream_configuration_t *streamList)
1350 {
1351     ATRACE_CALL();
1352     int rc = 0;
1353 
1354     // Sanity check stream_list
1355     if (streamList == NULL) {
1356         LOGE("NULL stream configuration");
1357         return BAD_VALUE;
1358     }
1359     if (streamList->streams == NULL) {
1360         LOGE("NULL stream list");
1361         return BAD_VALUE;
1362     }
1363 
1364     if (streamList->num_streams < 1) {
1365         LOGE("Bad number of streams requested: %d",
1366                 streamList->num_streams);
1367         return BAD_VALUE;
1368     }
1369 
1370     if (streamList->num_streams >= MAX_NUM_STREAMS) {
1371         LOGE("Maximum number of streams %d exceeded: %d",
1372                 MAX_NUM_STREAMS, streamList->num_streams);
1373         return BAD_VALUE;
1374     }
1375 
1376     mOpMode = streamList->operation_mode;
1377     LOGD("mOpMode: %d", mOpMode);
1378 
1379     /* first invalidate all the steams in the mStreamList
1380      * if they appear again, they will be validated */
1381     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1382             it != mStreamInfo.end(); it++) {
1383         QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1384         if (channel) {
1385           channel->stop();
1386         }
1387         (*it)->status = INVALID;
1388     }
1389 
1390     if (mRawDumpChannel) {
1391         mRawDumpChannel->stop();
1392         delete mRawDumpChannel;
1393         mRawDumpChannel = NULL;
1394     }
1395 
1396     if (mSupportChannel)
1397         mSupportChannel->stop();
1398 
1399     if (mAnalysisChannel) {
1400         mAnalysisChannel->stop();
1401     }
1402     if (mMetadataChannel) {
1403         /* If content of mStreamInfo is not 0, there is metadata stream */
1404         mMetadataChannel->stop();
1405     }
1406     if (mChannelHandle) {
1407         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1408                 mChannelHandle);
1409         LOGD("stopping channel %d", mChannelHandle);
1410     }
1411 
1412     pthread_mutex_lock(&mMutex);
1413 
1414     // Check state
1415     switch (mState) {
1416         case INITIALIZED:
1417         case CONFIGURED:
1418         case STARTED:
1419             /* valid state */
1420             break;
1421         default:
1422             LOGE("Invalid state %d", mState);
1423             pthread_mutex_unlock(&mMutex);
1424             return -ENODEV;
1425     }
1426 
1427     /* Check whether we have video stream */
1428     m_bIs4KVideo = false;
1429     m_bIsVideo = false;
1430     m_bEisSupportedSize = false;
1431     m_bTnrEnabled = false;
1432     bool isZsl = false;
1433     uint32_t videoWidth = 0U;
1434     uint32_t videoHeight = 0U;
1435     size_t rawStreamCnt = 0;
1436     size_t stallStreamCnt = 0;
1437     size_t processedStreamCnt = 0;
1438     // Number of streams on ISP encoder path
1439     size_t numStreamsOnEncoder = 0;
1440     size_t numYuv888OnEncoder = 0;
1441     bool bYuv888OverrideJpeg = false;
1442     cam_dimension_t largeYuv888Size = {0, 0};
1443     cam_dimension_t maxViewfinderSize = {0, 0};
1444     bool bJpegExceeds4K = false;
1445     bool bJpegOnEncoder = false;
1446     bool bUseCommonFeatureMask = false;
1447     cam_feature_mask_t commonFeatureMask = 0;
1448     bool bSmallJpegSize = false;
1449     uint32_t width_ratio;
1450     uint32_t height_ratio;
1451     maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1452     camera3_stream_t *inputStream = NULL;
1453     bool isJpeg = false;
1454     cam_dimension_t jpegSize = {0, 0};
1455 
1456     cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1457 
1458     /*EIS configuration*/
1459     bool eisSupported = false;
1460     bool oisSupported = false;
1461     int32_t margin_index = -1;
1462     uint8_t eis_prop_set;
1463     uint32_t maxEisWidth = 0;
1464     uint32_t maxEisHeight = 0;
1465 
1466     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1467 
1468     size_t count = IS_TYPE_MAX;
1469     count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1470     for (size_t i = 0; i < count; i++) {
1471         if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1472             eisSupported = true;
1473             margin_index = (int32_t)i;
1474             break;
1475         }
1476     }
1477 
1478     count = CAM_OPT_STAB_MAX;
1479     count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1480     for (size_t i = 0; i < count; i++) {
1481         if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1482             oisSupported = true;
1483             break;
1484         }
1485     }
1486 
1487     if (eisSupported) {
1488         maxEisWidth = MAX_EIS_WIDTH;
1489         maxEisHeight = MAX_EIS_HEIGHT;
1490     }
1491 
1492     /* EIS setprop control */
1493     char eis_prop[PROPERTY_VALUE_MAX];
1494     memset(eis_prop, 0, sizeof(eis_prop));
1495     property_get("persist.camera.eis.enable", eis_prop, "0");
1496     eis_prop_set = (uint8_t)atoi(eis_prop);
1497 
1498     m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1499             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1500 
1501     /* stream configurations */
1502     for (size_t i = 0; i < streamList->num_streams; i++) {
1503         camera3_stream_t *newStream = streamList->streams[i];
1504         LOGI("stream[%d] type = %d, format = %d, width = %d, "
1505                 "height = %d, rotation = %d, usage = 0x%x",
1506                  i, newStream->stream_type, newStream->format,
1507                 newStream->width, newStream->height, newStream->rotation,
1508                 newStream->usage);
1509         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1510                 newStream->stream_type == CAMERA3_STREAM_INPUT){
1511             isZsl = true;
1512         }
1513         if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1514             inputStream = newStream;
1515         }
1516 
1517         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1518             isJpeg = true;
1519             jpegSize.width = newStream->width;
1520             jpegSize.height = newStream->height;
1521             if (newStream->width > VIDEO_4K_WIDTH ||
1522                     newStream->height > VIDEO_4K_HEIGHT)
1523                 bJpegExceeds4K = true;
1524         }
1525 
1526         if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1527                 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1528             m_bIsVideo = true;
1529             videoWidth = newStream->width;
1530             videoHeight = newStream->height;
1531             if ((VIDEO_4K_WIDTH <= newStream->width) &&
1532                     (VIDEO_4K_HEIGHT <= newStream->height)) {
1533                 m_bIs4KVideo = true;
1534             }
1535             m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1536                                   (newStream->height <= maxEisHeight);
1537         }
1538         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1539                 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1540             switch (newStream->format) {
1541             case HAL_PIXEL_FORMAT_BLOB:
1542                 stallStreamCnt++;
1543                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1544                         newStream->height)) {
1545                     numStreamsOnEncoder++;
1546                     bJpegOnEncoder = true;
1547                 }
1548                 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1549                         newStream->width);
1550                 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1551                         newStream->height);;
1552                 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1553                         "FATAL: max_downscale_factor cannot be zero and so assert");
1554                 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1555                     (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1556                     LOGH("Setting small jpeg size flag to true");
1557                     bSmallJpegSize = true;
1558                 }
1559                 break;
1560             case HAL_PIXEL_FORMAT_RAW10:
1561             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1562             case HAL_PIXEL_FORMAT_RAW16:
1563                 rawStreamCnt++;
1564                 break;
1565             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1566                 processedStreamCnt++;
1567                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1568                         newStream->height)) {
1569                     if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1570                             !IS_USAGE_ZSL(newStream->usage)) {
1571                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1572                     }
1573                     numStreamsOnEncoder++;
1574                 }
1575                 break;
1576             case HAL_PIXEL_FORMAT_YCbCr_420_888:
1577                 processedStreamCnt++;
1578                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1579                         newStream->height)) {
1580                     // If Yuv888 size is not greater than 4K, set feature mask
1581                     // to SUPERSET so that it support concurrent request on
1582                     // YUV and JPEG.
1583                     if (newStream->width <= VIDEO_4K_WIDTH &&
1584                             newStream->height <= VIDEO_4K_HEIGHT) {
1585                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1586                     }
1587                     numStreamsOnEncoder++;
1588                     numYuv888OnEncoder++;
1589                     largeYuv888Size.width = newStream->width;
1590                     largeYuv888Size.height = newStream->height;
1591                 }
1592                 break;
1593             default:
1594                 processedStreamCnt++;
1595                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1596                         newStream->height)) {
1597                     commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1598                     numStreamsOnEncoder++;
1599                 }
1600                 break;
1601             }
1602 
1603         }
1604     }
1605 
1606     if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1607         !m_bIsVideo) {
1608         m_bEisEnable = false;
1609     }
1610 
1611     /* Logic to enable/disable TNR based on specific config size/etc.*/
1612     if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1613             ((videoWidth == 1920 && videoHeight == 1080) ||
1614             (videoWidth == 1280 && videoHeight == 720)) &&
1615             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1616         m_bTnrEnabled = true;
1617 
1618     /* Check if num_streams is sane */
1619     if (stallStreamCnt > MAX_STALLING_STREAMS ||
1620             rawStreamCnt > MAX_RAW_STREAMS ||
1621             processedStreamCnt > MAX_PROCESSED_STREAMS) {
1622         LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1623                  stallStreamCnt, rawStreamCnt, processedStreamCnt);
1624         pthread_mutex_unlock(&mMutex);
1625         return -EINVAL;
1626     }
1627     /* Check whether we have zsl stream or 4k video case */
1628     if (isZsl && m_bIsVideo) {
1629         LOGE("Currently invalid configuration ZSL&Video!");
1630         pthread_mutex_unlock(&mMutex);
1631         return -EINVAL;
1632     }
1633     /* Check if stream sizes are sane */
1634     if (numStreamsOnEncoder > 2) {
1635         LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1636         pthread_mutex_unlock(&mMutex);
1637         return -EINVAL;
1638     } else if (1 < numStreamsOnEncoder){
1639         bUseCommonFeatureMask = true;
1640         LOGH("Multiple streams above max viewfinder size, common mask needed");
1641     }
1642 
1643     /* Check if BLOB size is greater than 4k in 4k recording case */
1644     if (m_bIs4KVideo && bJpegExceeds4K) {
1645         LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1646         pthread_mutex_unlock(&mMutex);
1647         return -EINVAL;
1648     }
1649 
1650     // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1651     // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1652     // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1653     // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1654     // configurations:
1655     //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1656     //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1657     //    (These two configurations will not have CAC2 enabled even in HQ modes.)
1658     if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1659         ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1660                 __func__);
1661         pthread_mutex_unlock(&mMutex);
1662         return -EINVAL;
1663     }
1664 
1665     // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1666     // the YUV stream's size is greater or equal to the JPEG size, set common
1667     // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1668     if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1669             jpegSize.width, jpegSize.height) &&
1670             largeYuv888Size.width > jpegSize.width &&
1671             largeYuv888Size.height > jpegSize.height) {
1672         bYuv888OverrideJpeg = true;
1673     } else if (!isJpeg && numStreamsOnEncoder > 1) {
1674         commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1675     }
1676 
1677     LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1678             maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1679             commonFeatureMask);
1680     LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1681             numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1682 
1683     rc = validateStreamDimensions(streamList);
1684     if (rc == NO_ERROR) {
1685         rc = validateStreamRotations(streamList);
1686     }
1687     if (rc != NO_ERROR) {
1688         LOGE("Invalid stream configuration requested!");
1689         pthread_mutex_unlock(&mMutex);
1690         return rc;
1691     }
1692 
1693     camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1694     camera3_stream_t *jpegStream = NULL;
1695     for (size_t i = 0; i < streamList->num_streams; i++) {
1696         camera3_stream_t *newStream = streamList->streams[i];
1697         LOGH("newStream type = %d, stream format = %d "
1698                 "stream size : %d x %d, stream rotation = %d",
1699                  newStream->stream_type, newStream->format,
1700                 newStream->width, newStream->height, newStream->rotation);
1701         // If the stream already exists in mStreamInfo, validate it
1702         bool stream_exists = false;
1703         for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1704                 it != mStreamInfo.end(); it++) {
1705             if ((*it)->stream == newStream) {
1706                 QCamera3ProcessingChannel *channel =
1707                     (QCamera3ProcessingChannel*)(*it)->stream->priv;
1708                 stream_exists = true;
1709                 if (channel)
1710                     delete channel;
1711                 (*it)->status = VALID;
1712                 (*it)->stream->priv = NULL;
1713                 (*it)->channel = NULL;
1714             }
1715         }
1716         if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1717             //new stream
1718             stream_info_t* stream_info;
1719             stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1720             if (!stream_info) {
1721                LOGE("Could not allocate stream info");
1722                rc = -ENOMEM;
1723                pthread_mutex_unlock(&mMutex);
1724                return rc;
1725             }
1726             stream_info->stream = newStream;
1727             stream_info->status = VALID;
1728             stream_info->channel = NULL;
1729             mStreamInfo.push_back(stream_info);
1730         }
1731         /* Covers Opaque ZSL and API1 F/W ZSL */
1732         if (IS_USAGE_ZSL(newStream->usage)
1733                 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1734             if (zslStream != NULL) {
1735                 LOGE("Multiple input/reprocess streams requested!");
1736                 pthread_mutex_unlock(&mMutex);
1737                 return BAD_VALUE;
1738             }
1739             zslStream = newStream;
1740         }
1741         /* Covers YUV reprocess */
1742         if (inputStream != NULL) {
1743             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1744                     && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1745                     && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1746                     && inputStream->width == newStream->width
1747                     && inputStream->height == newStream->height) {
1748                 if (zslStream != NULL) {
1749                     /* This scenario indicates multiple YUV streams with the same size
1750                      * as the input stream have been requested. Since the zsl stream handle
1751                      * is solely used for the purpose of overriding the size of streams
1752                      * which share h/w streams, we will just make a guess here as to
1753                      * which of the streams is a ZSL stream; this will be refactored
1754                      * once we make generic logic for streams sharing encoder output
1755                      */
1756                     LOGH("Warning, Multiple ip/reprocess streams requested!");
1757                 }
1758                 zslStream = newStream;
1759             }
1760         }
1761         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1762             jpegStream = newStream;
1763         }
1764     }
1765 
1766     /* If a zsl stream is set, we know that we have configured at least one input or
1767        bidirectional stream */
1768     if (NULL != zslStream) {
1769         mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1770         mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1771         mInputStreamInfo.format = zslStream->format;
1772         mInputStreamInfo.usage = zslStream->usage;
1773         LOGD("Input stream configured! %d x %d, format %d, usage %d",
1774                  mInputStreamInfo.dim.width,
1775                 mInputStreamInfo.dim.height,
1776                 mInputStreamInfo.format, mInputStreamInfo.usage);
1777     }
1778 
1779     cleanAndSortStreamInfo();
1780     if (mMetadataChannel) {
1781         delete mMetadataChannel;
1782         mMetadataChannel = NULL;
1783     }
1784     if (mSupportChannel) {
1785         delete mSupportChannel;
1786         mSupportChannel = NULL;
1787     }
1788 
1789     if (mAnalysisChannel) {
1790         delete mAnalysisChannel;
1791         mAnalysisChannel = NULL;
1792     }
1793 
1794     if (mDummyBatchChannel) {
1795         delete mDummyBatchChannel;
1796         mDummyBatchChannel = NULL;
1797     }
1798 
1799     //Create metadata channel and initialize it
1800     cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1801     setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1802             gCamCapability[mCameraId]->color_arrangement);
1803     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1804                     mChannelHandle, mCameraHandle->ops, captureResultCb,
1805                     setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
1806     if (mMetadataChannel == NULL) {
1807         LOGE("failed to allocate metadata channel");
1808         rc = -ENOMEM;
1809         pthread_mutex_unlock(&mMutex);
1810         return rc;
1811     }
1812     rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1813     if (rc < 0) {
1814         LOGE("metadata channel initialization failed");
1815         delete mMetadataChannel;
1816         mMetadataChannel = NULL;
1817         pthread_mutex_unlock(&mMutex);
1818         return rc;
1819     }
1820 
1821     // Create analysis stream all the time, even when h/w support is not available
1822     {
1823         cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1824         cam_analysis_info_t analysisInfo;
1825         rc = mCommon.getAnalysisInfo(
1826                 FALSE,
1827                 TRUE,
1828                 analysisFeatureMask,
1829                 &analysisInfo);
1830         if (rc != NO_ERROR) {
1831             LOGE("getAnalysisInfo failed, ret = %d", rc);
1832             pthread_mutex_unlock(&mMutex);
1833             return rc;
1834         }
1835 
1836         cam_color_filter_arrangement_t analysis_color_arrangement =
1837                 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
1838                 CAM_FILTER_ARRANGEMENT_Y :
1839                 gCamCapability[mCameraId]->color_arrangement);
1840         setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
1841                 analysis_color_arrangement);
1842 
1843         mAnalysisChannel = new QCamera3SupportChannel(
1844                 mCameraHandle->camera_handle,
1845                 mChannelHandle,
1846                 mCameraHandle->ops,
1847                 &analysisInfo.analysis_padding_info,
1848                 analysisFeatureMask,
1849                 CAM_STREAM_TYPE_ANALYSIS,
1850                 &analysisInfo.analysis_max_res,
1851                 (analysisInfo.analysis_format
1852                 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
1853                 : CAM_FORMAT_YUV_420_NV21),
1854                 analysisInfo.hw_analysis_supported,
1855                 this,
1856                 0); // force buffer count to 0
1857         if (!mAnalysisChannel) {
1858             LOGE("H/W Analysis channel cannot be created");
1859             pthread_mutex_unlock(&mMutex);
1860             return -ENOMEM;
1861         }
1862     }
1863 
1864     bool isRawStreamRequested = false;
1865     memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1866     /* Allocate channel objects for the requested streams */
1867     for (size_t i = 0; i < streamList->num_streams; i++) {
1868         camera3_stream_t *newStream = streamList->streams[i];
1869         uint32_t stream_usage = newStream->usage;
1870         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1871         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1872         if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1873                 || IS_USAGE_ZSL(newStream->usage)) &&
1874             newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1875             mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1876             if (bUseCommonFeatureMask) {
1877                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1878                         commonFeatureMask;
1879             } else {
1880                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1881                         CAM_QCOM_FEATURE_NONE;
1882             }
1883 
1884         } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1885                 LOGH("Input stream configured, reprocess config");
1886         } else {
1887             //for non zsl streams find out the format
1888             switch (newStream->format) {
1889             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1890             {
1891                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1892                         CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1893                 /* add additional features to pp feature mask */
1894                 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1895                         mStreamConfigInfo.num_streams);
1896 
1897                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1898                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1899                                 CAM_STREAM_TYPE_VIDEO;
1900                     if (m_bTnrEnabled && m_bTnrVideo) {
1901                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1902                             CAM_QCOM_FEATURE_CPP_TNR;
1903                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1904                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1905                                 ~CAM_QCOM_FEATURE_CDS;
1906                     }
1907                 } else {
1908                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1909                             CAM_STREAM_TYPE_PREVIEW;
1910                     if (m_bTnrEnabled && m_bTnrPreview) {
1911                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1912                                 CAM_QCOM_FEATURE_CPP_TNR;
1913                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1914                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1915                                 ~CAM_QCOM_FEATURE_CDS;
1916                     }
1917                     padding_info.width_padding = mSurfaceStridePadding;
1918                     padding_info.height_padding = CAM_PAD_TO_2;
1919                 }
1920                 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1921                         (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1922                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1923                             newStream->height;
1924                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1925                             newStream->width;
1926                 }
1927             }
1928             break;
1929             case HAL_PIXEL_FORMAT_YCbCr_420_888:
1930                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1931                 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1932                     if (bUseCommonFeatureMask)
1933                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1934                                 commonFeatureMask;
1935                     else
1936                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1937                                 CAM_QCOM_FEATURE_NONE;
1938                 } else {
1939                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1940                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1941                 }
1942             break;
1943             case HAL_PIXEL_FORMAT_BLOB:
1944                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1945                 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
1946                 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
1947                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1948                              CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1949                 } else {
1950                     if (bUseCommonFeatureMask &&
1951                             isOnEncoder(maxViewfinderSize, newStream->width,
1952                             newStream->height)) {
1953                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1954                     } else {
1955                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1956                     }
1957                 }
1958                 if (isZsl) {
1959                     if (zslStream) {
1960                         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1961                                 (int32_t)zslStream->width;
1962                         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1963                                 (int32_t)zslStream->height;
1964                     } else {
1965                         LOGE("Error, No ZSL stream identified");
1966                         pthread_mutex_unlock(&mMutex);
1967                         return -EINVAL;
1968                     }
1969                 } else if (m_bIs4KVideo) {
1970                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
1971                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
1972                 } else if (bYuv888OverrideJpeg) {
1973                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1974                             (int32_t)largeYuv888Size.width;
1975                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1976                             (int32_t)largeYuv888Size.height;
1977                 }
1978                 break;
1979             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1980             case HAL_PIXEL_FORMAT_RAW16:
1981             case HAL_PIXEL_FORMAT_RAW10:
1982                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1983                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1984                 isRawStreamRequested = true;
1985                 break;
1986             default:
1987                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1988                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1989                 break;
1990             }
1991         }
1992 
1993         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1994                 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1995                 gCamCapability[mCameraId]->color_arrangement);
1996 
1997         if (newStream->priv == NULL) {
1998             //New stream, construct channel
1999             switch (newStream->stream_type) {
2000             case CAMERA3_STREAM_INPUT:
2001                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2002                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2003                 break;
2004             case CAMERA3_STREAM_BIDIRECTIONAL:
2005                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2006                     GRALLOC_USAGE_HW_CAMERA_WRITE;
2007                 break;
2008             case CAMERA3_STREAM_OUTPUT:
2009                 /* For video encoding stream, set read/write rarely
2010                  * flag so that they may be set to un-cached */
2011                 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2012                     newStream->usage |=
2013                          (GRALLOC_USAGE_SW_READ_RARELY |
2014                          GRALLOC_USAGE_SW_WRITE_RARELY |
2015                          GRALLOC_USAGE_HW_CAMERA_WRITE);
2016                 else if (IS_USAGE_ZSL(newStream->usage))
2017                 {
2018                     LOGD("ZSL usage flag skipping");
2019                 }
2020                 else if (newStream == zslStream
2021                         || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2022                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2023                 } else
2024                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2025                 break;
2026             default:
2027                 LOGE("Invalid stream_type %d", newStream->stream_type);
2028                 break;
2029             }
2030 
2031             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2032                     newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2033                 QCamera3ProcessingChannel *channel = NULL;
2034                 switch (newStream->format) {
2035                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2036                     if ((newStream->usage &
2037                             private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2038                             (streamList->operation_mode ==
2039                             CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2040                     ) {
2041                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2042                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
2043                                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2044                                 this,
2045                                 newStream,
2046                                 (cam_stream_type_t)
2047                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2048                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2049                                 mMetadataChannel,
2050                                 0); //heap buffers are not required for HFR video channel
2051                         if (channel == NULL) {
2052                             LOGE("allocation of channel failed");
2053                             pthread_mutex_unlock(&mMutex);
2054                             return -ENOMEM;
2055                         }
2056                         //channel->getNumBuffers() will return 0 here so use
2057                         //MAX_INFLIGHT_HFR_REQUESTS
2058                         newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2059                         newStream->priv = channel;
2060                         LOGI("num video buffers in HFR mode: %d",
2061                                  MAX_INFLIGHT_HFR_REQUESTS);
2062                     } else {
2063                         /* Copy stream contents in HFR preview only case to create
2064                          * dummy batch channel so that sensor streaming is in
2065                          * HFR mode */
2066                         if (!m_bIsVideo && (streamList->operation_mode ==
2067                                 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2068                             mDummyBatchStream = *newStream;
2069                         }
2070                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2071                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
2072                                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2073                                 this,
2074                                 newStream,
2075                                 (cam_stream_type_t)
2076                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2077                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2078                                 mMetadataChannel,
2079                                 MAX_INFLIGHT_REQUESTS);
2080                         if (channel == NULL) {
2081                             LOGE("allocation of channel failed");
2082                             pthread_mutex_unlock(&mMutex);
2083                             return -ENOMEM;
2084                         }
2085                         newStream->max_buffers = MAX_INFLIGHT_60FPS_REQUESTS;
2086                         newStream->priv = channel;
2087                     }
2088                     break;
2089                 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2090                     channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2091                             mChannelHandle,
2092                             mCameraHandle->ops, captureResultCb,
2093                             setBufferErrorStatus, &padding_info,
2094                             this,
2095                             newStream,
2096                             (cam_stream_type_t)
2097                                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2098                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2099                             mMetadataChannel);
2100                     if (channel == NULL) {
2101                         LOGE("allocation of YUV channel failed");
2102                         pthread_mutex_unlock(&mMutex);
2103                         return -ENOMEM;
2104                     }
2105                     newStream->max_buffers = channel->getNumBuffers();
2106                     newStream->priv = channel;
2107                     break;
2108                 }
2109                 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2110                 case HAL_PIXEL_FORMAT_RAW16:
2111                 case HAL_PIXEL_FORMAT_RAW10:
2112                     mRawChannel = new QCamera3RawChannel(
2113                             mCameraHandle->camera_handle, mChannelHandle,
2114                             mCameraHandle->ops, captureResultCb,
2115                             setBufferErrorStatus, &padding_info,
2116                             this, newStream,
2117                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2118                             mMetadataChannel,
2119                             (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2120                     if (mRawChannel == NULL) {
2121                         LOGE("allocation of raw channel failed");
2122                         pthread_mutex_unlock(&mMutex);
2123                         return -ENOMEM;
2124                     }
2125                     newStream->max_buffers = mRawChannel->getNumBuffers();
2126                     newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2127                     break;
2128                 case HAL_PIXEL_FORMAT_BLOB:
2129                     // Max live snapshot inflight buffer is 1. This is to mitigate
2130                     // frame drop issues for video snapshot. The more buffers being
2131                     // allocated, the more frame drops there are.
2132                     mPictureChannel = new QCamera3PicChannel(
2133                             mCameraHandle->camera_handle, mChannelHandle,
2134                             mCameraHandle->ops, captureResultCb,
2135                             setBufferErrorStatus, &padding_info, this, newStream,
2136                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2137                             m_bIs4KVideo, isZsl, mMetadataChannel,
2138                             (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2139                     if (mPictureChannel == NULL) {
2140                         LOGE("allocation of channel failed");
2141                         pthread_mutex_unlock(&mMutex);
2142                         return -ENOMEM;
2143                     }
2144                     newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2145                     newStream->max_buffers = mPictureChannel->getNumBuffers();
2146                     mPictureChannel->overrideYuvSize(
2147                             mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2148                             mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2149                     break;
2150 
2151                 default:
2152                     LOGE("not a supported format 0x%x", newStream->format);
2153                     break;
2154                 }
2155             } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2156                 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2157             } else {
2158                 LOGE("Error, Unknown stream type");
2159                 pthread_mutex_unlock(&mMutex);
2160                 return -EINVAL;
2161             }
2162 
2163             QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2164             if (channel != NULL && channel->isUBWCEnabled()) {
2165                 cam_format_t fmt = channel->getStreamDefaultFormat(
2166                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2167                         newStream->width, newStream->height);
2168                 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2169                     newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2170                 }
2171             }
2172 
2173             for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2174                     it != mStreamInfo.end(); it++) {
2175                 if ((*it)->stream == newStream) {
2176                     (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2177                     break;
2178                 }
2179             }
2180         } else {
2181             // Channel already exists for this stream
2182             // Do nothing for now
2183         }
2184         padding_info = gCamCapability[mCameraId]->padding_info;
2185 
2186         /* Do not add entries for input stream in metastream info
2187          * since there is no real stream associated with it
2188          */
2189         if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2190             mStreamConfigInfo.num_streams++;
2191     }
2192 
2193     //RAW DUMP channel
2194     if (mEnableRawDump && isRawStreamRequested == false){
2195         cam_dimension_t rawDumpSize;
2196         rawDumpSize = getMaxRawSize(mCameraId);
2197         cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2198         setPAAFSupport(rawDumpFeatureMask,
2199                 CAM_STREAM_TYPE_RAW,
2200                 gCamCapability[mCameraId]->color_arrangement);
2201         mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2202                                   mChannelHandle,
2203                                   mCameraHandle->ops,
2204                                   rawDumpSize,
2205                                   &padding_info,
2206                                   this, rawDumpFeatureMask);
2207         if (!mRawDumpChannel) {
2208             LOGE("Raw Dump channel cannot be created");
2209             pthread_mutex_unlock(&mMutex);
2210             return -ENOMEM;
2211         }
2212     }
2213 
2214 
2215     if (mAnalysisChannel) {
2216         cam_analysis_info_t analysisInfo;
2217         memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2218         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2219                 CAM_STREAM_TYPE_ANALYSIS;
2220         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2221                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2222         rc = mCommon.getAnalysisInfo(FALSE, TRUE,
2223                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2224                 &analysisInfo);
2225         if (rc != NO_ERROR) {
2226             LOGE("getAnalysisInfo failed, ret = %d", rc);
2227             pthread_mutex_unlock(&mMutex);
2228             return rc;
2229         }
2230         cam_color_filter_arrangement_t analysis_color_arrangement =
2231                 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2232                 CAM_FILTER_ARRANGEMENT_Y :
2233                 gCamCapability[mCameraId]->color_arrangement);
2234         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2235                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2236                 analysis_color_arrangement);
2237 
2238         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2239                 analysisInfo.analysis_max_res;
2240         mStreamConfigInfo.num_streams++;
2241     }
2242 
2243     if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2244         cam_analysis_info_t supportInfo;
2245         memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2246         cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2247         setPAAFSupport(callbackFeatureMask,
2248                 CAM_STREAM_TYPE_CALLBACK,
2249                 gCamCapability[mCameraId]->color_arrangement);
2250         rc = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
2251         if (rc != NO_ERROR) {
2252             LOGE("getAnalysisInfo failed, ret = %d", rc);
2253             pthread_mutex_unlock(&mMutex);
2254             return rc;
2255         }
2256         mSupportChannel = new QCamera3SupportChannel(
2257                 mCameraHandle->camera_handle,
2258                 mChannelHandle,
2259                 mCameraHandle->ops,
2260                 &gCamCapability[mCameraId]->padding_info,
2261                 callbackFeatureMask,
2262                 CAM_STREAM_TYPE_CALLBACK,
2263                 &QCamera3SupportChannel::kDim,
2264                 CAM_FORMAT_YUV_420_NV21,
2265                 supportInfo.hw_analysis_supported,
2266                 this, 0);
2267         if (!mSupportChannel) {
2268             LOGE("dummy channel cannot be created");
2269             pthread_mutex_unlock(&mMutex);
2270             return -ENOMEM;
2271         }
2272     }
2273 
2274     if (mSupportChannel) {
2275         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2276                 QCamera3SupportChannel::kDim;
2277         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2278                 CAM_STREAM_TYPE_CALLBACK;
2279         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2280                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2281         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2282                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2283                 gCamCapability[mCameraId]->color_arrangement);
2284         mStreamConfigInfo.num_streams++;
2285     }
2286 
2287     if (mRawDumpChannel) {
2288         cam_dimension_t rawSize;
2289         rawSize = getMaxRawSize(mCameraId);
2290         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2291                 rawSize;
2292         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2293                 CAM_STREAM_TYPE_RAW;
2294         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2295                 CAM_QCOM_FEATURE_NONE;
2296         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2297                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2298                 gCamCapability[mCameraId]->color_arrangement);
2299         mStreamConfigInfo.num_streams++;
2300     }
2301     /* In HFR mode, if video stream is not added, create a dummy channel so that
2302      * ISP can create a batch mode even for preview only case. This channel is
2303      * never 'start'ed (no stream-on), it is only 'initialized'  */
2304     if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2305             !m_bIsVideo) {
2306         cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2307         setPAAFSupport(dummyFeatureMask,
2308                 CAM_STREAM_TYPE_VIDEO,
2309                 gCamCapability[mCameraId]->color_arrangement);
2310         mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2311                 mChannelHandle,
2312                 mCameraHandle->ops, captureResultCb,
2313                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2314                 this,
2315                 &mDummyBatchStream,
2316                 CAM_STREAM_TYPE_VIDEO,
2317                 dummyFeatureMask,
2318                 mMetadataChannel);
2319         if (NULL == mDummyBatchChannel) {
2320             LOGE("creation of mDummyBatchChannel failed."
2321                     "Preview will use non-hfr sensor mode ");
2322         }
2323     }
2324     if (mDummyBatchChannel) {
2325         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2326                 mDummyBatchStream.width;
2327         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2328                 mDummyBatchStream.height;
2329         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2330                 CAM_STREAM_TYPE_VIDEO;
2331         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2332                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2333         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2334                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2335                 gCamCapability[mCameraId]->color_arrangement);
2336         mStreamConfigInfo.num_streams++;
2337     }
2338 
2339     mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2340     mStreamConfigInfo.buffer_info.max_buffers =
2341             m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2342 
2343     /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2344     for (pendingRequestIterator i = mPendingRequestsList.begin();
2345             i != mPendingRequestsList.end();) {
2346         i = erasePendingRequest(i);
2347     }
2348     mPendingFrameDropList.clear();
2349     // Initialize/Reset the pending buffers list
2350     for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2351         req.mPendingBufferList.clear();
2352     }
2353     mPendingBuffersMap.mPendingBuffersInRequest.clear();
2354 
2355     mPendingReprocessResultList.clear();
2356 
2357     mCurJpegMeta.clear();
2358     //Get min frame duration for this streams configuration
2359     deriveMinFrameDuration();
2360 
2361     // Update state
2362     mState = CONFIGURED;
2363 
2364     pthread_mutex_unlock(&mMutex);
2365 
2366     return rc;
2367 }
2368 
2369 /*===========================================================================
2370  * FUNCTION   : validateCaptureRequest
2371  *
2372  * DESCRIPTION: validate a capture request from camera service
2373  *
2374  * PARAMETERS :
2375  *   @request : request from framework to process
2376  *
2377  * RETURN     :
2378  *
2379  *==========================================================================*/
validateCaptureRequest(camera3_capture_request_t * request)2380 int QCamera3HardwareInterface::validateCaptureRequest(
2381                     camera3_capture_request_t *request)
2382 {
2383     ssize_t idx = 0;
2384     const camera3_stream_buffer_t *b;
2385     CameraMetadata meta;
2386 
2387     /* Sanity check the request */
2388     if (request == NULL) {
2389         LOGE("NULL capture request");
2390         return BAD_VALUE;
2391     }
2392 
2393     if ((request->settings == NULL) && (mState == CONFIGURED)) {
2394         /*settings cannot be null for the first request*/
2395         return BAD_VALUE;
2396     }
2397 
2398     uint32_t frameNumber = request->frame_number;
2399     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2400         LOGE("Request %d: No output buffers provided!",
2401                 __FUNCTION__, frameNumber);
2402         return BAD_VALUE;
2403     }
2404     if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2405         LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2406                  request->num_output_buffers, MAX_NUM_STREAMS);
2407         return BAD_VALUE;
2408     }
2409     if (request->input_buffer != NULL) {
2410         b = request->input_buffer;
2411         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2412             LOGE("Request %d: Buffer %ld: Status not OK!",
2413                      frameNumber, (long)idx);
2414             return BAD_VALUE;
2415         }
2416         if (b->release_fence != -1) {
2417             LOGE("Request %d: Buffer %ld: Has a release fence!",
2418                      frameNumber, (long)idx);
2419             return BAD_VALUE;
2420         }
2421         if (b->buffer == NULL) {
2422             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2423                      frameNumber, (long)idx);
2424             return BAD_VALUE;
2425         }
2426     }
2427 
2428     // Validate all buffers
2429     b = request->output_buffers;
2430     do {
2431         QCamera3ProcessingChannel *channel =
2432                 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2433         if (channel == NULL) {
2434             LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2435                      frameNumber, (long)idx);
2436             return BAD_VALUE;
2437         }
2438         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2439             LOGE("Request %d: Buffer %ld: Status not OK!",
2440                      frameNumber, (long)idx);
2441             return BAD_VALUE;
2442         }
2443         if (b->release_fence != -1) {
2444             LOGE("Request %d: Buffer %ld: Has a release fence!",
2445                      frameNumber, (long)idx);
2446             return BAD_VALUE;
2447         }
2448         if (b->buffer == NULL) {
2449             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2450                      frameNumber, (long)idx);
2451             return BAD_VALUE;
2452         }
2453         if (*(b->buffer) == NULL) {
2454             LOGE("Request %d: Buffer %ld: NULL private handle!",
2455                      frameNumber, (long)idx);
2456             return BAD_VALUE;
2457         }
2458         idx++;
2459         b = request->output_buffers + idx;
2460     } while (idx < (ssize_t)request->num_output_buffers);
2461 
2462     return NO_ERROR;
2463 }
2464 
2465 /*===========================================================================
2466  * FUNCTION   : deriveMinFrameDuration
2467  *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2469  *              on currently configured streams.
2470  *
2471  * PARAMETERS : NONE
2472  *
2473  * RETURN     : NONE
2474  *
2475  *==========================================================================*/
deriveMinFrameDuration()2476 void QCamera3HardwareInterface::deriveMinFrameDuration()
2477 {
2478     int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2479 
2480     maxJpegDim = 0;
2481     maxProcessedDim = 0;
2482     maxRawDim = 0;
2483 
2484     // Figure out maximum jpeg, processed, and raw dimensions
2485     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2486         it != mStreamInfo.end(); it++) {
2487 
2488         // Input stream doesn't have valid stream_type
2489         if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2490             continue;
2491 
2492         int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2493         if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2494             if (dimension > maxJpegDim)
2495                 maxJpegDim = dimension;
2496         } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2497                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2498                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2499             if (dimension > maxRawDim)
2500                 maxRawDim = dimension;
2501         } else {
2502             if (dimension > maxProcessedDim)
2503                 maxProcessedDim = dimension;
2504         }
2505     }
2506 
2507     size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2508             MAX_SIZES_CNT);
2509 
2510     //Assume all jpeg dimensions are in processed dimensions.
2511     if (maxJpegDim > maxProcessedDim)
2512         maxProcessedDim = maxJpegDim;
2513     //Find the smallest raw dimension that is greater or equal to jpeg dimension
2514     if (maxProcessedDim > maxRawDim) {
2515         maxRawDim = INT32_MAX;
2516 
2517         for (size_t i = 0; i < count; i++) {
2518             int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2519                     gCamCapability[mCameraId]->raw_dim[i].height;
2520             if (dimension >= maxProcessedDim && dimension < maxRawDim)
2521                 maxRawDim = dimension;
2522         }
2523     }
2524 
2525     //Find minimum durations for processed, jpeg, and raw
2526     for (size_t i = 0; i < count; i++) {
2527         if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2528                 gCamCapability[mCameraId]->raw_dim[i].height) {
2529             mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2530             break;
2531         }
2532     }
2533     count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2534     for (size_t i = 0; i < count; i++) {
2535         if (maxProcessedDim ==
2536                 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2537                 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2538             mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2539             mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2540             break;
2541         }
2542     }
2543 }
2544 
2545 /*===========================================================================
2546  * FUNCTION   : getMinFrameDuration
2547  *
 * DESCRIPTION: get minimum frame duration based on the current minimum frame durations
2549  *              and current request configuration.
2550  *
 * PARAMETERS : @request: request sent by the framework
2552  *
 * RETURN     : min frame duration for a particular request
2554  *
2555  *==========================================================================*/
getMinFrameDuration(const camera3_capture_request_t * request)2556 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2557 {
2558     bool hasJpegStream = false;
2559     bool hasRawStream = false;
2560     for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2561         const camera3_stream_t *stream = request->output_buffers[i].stream;
2562         if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2563             hasJpegStream = true;
2564         else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2565                 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2566                 stream->format == HAL_PIXEL_FORMAT_RAW16)
2567             hasRawStream = true;
2568     }
2569 
2570     if (!hasJpegStream)
2571         return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2572     else
2573         return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2574 }
2575 
2576 /*===========================================================================
2577  * FUNCTION   : handleBuffersDuringFlushLock
2578  *
2579  * DESCRIPTION: Account for buffers returned from back-end during flush
2580  *              This function is executed while mMutex is held by the caller.
2581  *
2582  * PARAMETERS :
2583  *   @buffer: image buffer for the callback
2584  *
2585  * RETURN     :
2586  *==========================================================================*/
handleBuffersDuringFlushLock(camera3_stream_buffer_t * buffer)2587 void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2588 {
2589     bool buffer_found = false;
2590     for (List<PendingBuffersInRequest>::iterator req =
2591             mPendingBuffersMap.mPendingBuffersInRequest.begin();
2592             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2593         for (List<PendingBufferInfo>::iterator i =
2594                 req->mPendingBufferList.begin();
2595                 i != req->mPendingBufferList.end(); i++) {
2596             if (i->buffer == buffer->buffer) {
2597                 mPendingBuffersMap.numPendingBufsAtFlush--;
2598                 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2599                     buffer->buffer, req->frame_number,
2600                     mPendingBuffersMap.numPendingBufsAtFlush);
2601                 buffer_found = true;
2602                 break;
2603             }
2604         }
2605         if (buffer_found) {
2606             break;
2607         }
2608     }
2609     if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2610         //signal the flush()
2611         LOGD("All buffers returned to HAL. Continue flush");
2612         pthread_cond_signal(&mBuffersCond);
2613     }
2614 }
2615 
2616 
2617 /*===========================================================================
2618  * FUNCTION   : handlePendingReprocResults
2619  *
2620  * DESCRIPTION: check and notify on any pending reprocess results
2621  *
2622  * PARAMETERS :
2623  *   @frame_number   : Pending request frame number
2624  *
2625  * RETURN     : int32_t type of status
2626  *              NO_ERROR  -- success
 *              non-zero failure code
2628  *==========================================================================*/
handlePendingReprocResults(uint32_t frame_number)2629 int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
2630 {
2631     for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
2632             j != mPendingReprocessResultList.end(); j++) {
2633         if (j->frame_number == frame_number) {
2634             mCallbackOps->notify(mCallbackOps, &j->notify_msg);
2635 
2636             LOGD("Delayed reprocess notify %d",
2637                     frame_number);
2638 
2639             for (pendingRequestIterator k = mPendingRequestsList.begin();
2640                     k != mPendingRequestsList.end(); k++) {
2641 
2642                 if (k->frame_number == j->frame_number) {
2643                     LOGD("Found reprocess frame number %d in pending reprocess List "
2644                             "Take it out!!",
2645                             k->frame_number);
2646 
2647                     camera3_capture_result result;
2648                     memset(&result, 0, sizeof(camera3_capture_result));
2649                     result.frame_number = frame_number;
2650                     result.num_output_buffers = 1;
2651                     result.output_buffers =  &j->buffer;
2652                     result.input_buffer = k->input_buffer;
2653                     result.result = k->settings;
2654                     result.partial_result = PARTIAL_RESULT_COUNT;
2655                     mCallbackOps->process_capture_result(mCallbackOps, &result);
2656 
2657                     erasePendingRequest(k);
2658                     break;
2659                 }
2660             }
2661             mPendingReprocessResultList.erase(j);
2662             break;
2663         }
2664     }
2665     return NO_ERROR;
2666 }
2667 
2668 /*===========================================================================
2669  * FUNCTION   : handleBatchMetadata
2670  *
2671  * DESCRIPTION: Handles metadata buffer callback in batch mode
2672  *
2673  * PARAMETERS : @metadata_buf: metadata buffer
2674  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2675  *                 the meta buf in this method
2676  *
2677  * RETURN     :
2678  *
2679  *==========================================================================*/
/*
 * Handle a metadata buffer callback while running in batch (HFR) mode. A
 * single batch metadata carries bookkeeping for the LAST frame of the batch;
 * this method interpolates per-frame frame numbers and timestamps and calls
 * handleMetadataWithLock() once per frame in the batch.
 *
 * @metadata_buf              : metadata super buffer from the back-end
 * @free_and_bufdone_meta_buf : when true, buf-done and free the metadata
 *                              buffer in this method
 */
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp of
     * the last frame in the batch. Eg: a batch containing buffers from request
     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
     * multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    // first_frame_capture_time and capture_time are only read in the
    // timestamp-interpolation block, which is guarded by a non-zero
    // last_frame_capture_time.
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Locate the batch bookkeeping entries inside the metadata blob.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        // Note: we still fall through and run the loop once with
        // invalid_metadata set, so that handleMetadataWithLock() can do its
        // pipeline-depth accounting (see comment inside the loop).
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the last urgent frame number of the batch back to the first
        // request of that batch; a miss means bookkeeping is broken beyond
        // recovery, so the HAL transitions to ERROR.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGH("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        // Same mapping for the regular frame number; this path also retires
        // the batch entry from mPendingBatchMap.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        mPendingBatchMap.removeItem(last_frame_number);

        LOGH("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // Iterate over the larger of the two spans; batches larger than
        // MAX_HFR_BATCH_SIZE indicate a back-end problem worth logging.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                     urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                     frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    // Patch the interpolated urgent frame number into the
                    // (shared) metadata blob before handing it down.
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                             urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                //Infer timestamp: spread the batch evenly at the HFR video
                //fps, ending at the reported last-frame capture time.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGH("batch capture_time: %lld, capture_time: %lld",
                         last_frame_capture_time, capture_time);
            }
        }
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer — ownership was transferred to this method when
     * free_and_bufdone_meta_buf is true; handleMetadataWithLock() above was
     * always told NOT to free it, so it is released exactly once here. */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2844 
notifyError(uint32_t frameNumber,camera3_error_msg_code_t errorCode)2845 void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2846         camera3_error_msg_code_t errorCode)
2847 {
2848     camera3_notify_msg_t notify_msg;
2849     memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2850     notify_msg.type = CAMERA3_MSG_ERROR;
2851     notify_msg.message.error.error_code = errorCode;
2852     notify_msg.message.error.error_stream = NULL;
2853     notify_msg.message.error.frame_number = frameNumber;
2854     mCallbackOps->notify(mCallbackOps, &notify_msg);
2855 
2856     return;
2857 }
2858 /*===========================================================================
2859  * FUNCTION   : handleMetadataWithLock
2860  *
2861  * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2862  *
2863  * PARAMETERS : @metadata_buf: metadata buffer
2864  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2865  *                 the meta buf in this method
2866  *              @firstMetadataInBatch: Boolean to indicate whether this is the
2867  *                  first metadata in a batch. Valid only for batch mode
2868  *
2869  * RETURN     :
2870  *
2871  *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
    bool firstMetadataInBatch)
{
    ATRACE_CALL();
    // Drop metadata entirely while flushing or after a fatal error; still
    // honor the caller's request to bufDone/free the buffer so it isn't leaked.
    if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
        //during flush do not send metadata from this thread
        LOGD("not sending metadata during flush or when mState is error");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        return;
    }

    //not in flush
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid, urgent_frame_number_valid;
    uint32_t frame_number, urgent_frame_number;
    int64_t capture_time;
    nsecs_t currentSysTime;

    // Pull the raw pointers out of the metadata batch first; each may be NULL
    // if the corresponding entry is absent, so they are validated below before
    // dereferencing.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    // p_cam_frame_drop (declared by IF_META_AVAILABLE) stays in scope and is
    // used later to emit ERROR_BUFFER notifications for dropped streams.
    IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
            metadata) {
        LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
                 *p_frame_number_valid, *p_frame_number);
    }

    // Malformed metadata: release the buffer if asked, but still fall through
    // to done_metadata so pending requests' pipeline depth keeps advancing.
    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    frame_number_valid =        *p_frame_number_valid;
    frame_number =              *p_frame_number;
    capture_time =              *p_capture_time;
    urgent_frame_number_valid = *p_urgent_frame_number_valid;
    urgent_frame_number =       *p_urgent_frame_number;
    currentSysTime =            systemTime(CLOCK_MONOTONIC);

    // Detect if buffers from any requests are overdue
    // (older than MISSING_REQUEST_BUF_TIMEOUT seconds) and ask the owning
    // channel to time out that frame so the framework isn't stalled forever.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        if ( (currentSysTime - req.timestamp) >
            s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
            for (auto &missed : req.mPendingBufferList) {
                assert(missed.stream->priv);
                if (missed.stream->priv) {
                    QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
                    assert(ch->mStreams[0]);
                    if (ch->mStreams[0]) {
                        LOGW("Missing: frame = %d, buffer = %p,"
                            "stream type = %d, stream format = %d",
                            req.frame_number, missed.buffer,
                            ch->mStreams[0]->getMyType(), missed.stream->format);
                        ch->timeoutFrame(req.frame_number);
                    }
                }
            }
        }
    }
    //Partial result on process_capture_result for timestamp
    if (urgent_frame_number_valid) {
        LOGD("valid urgent frame_number = %u, capture_time = %lld",
           urgent_frame_number, capture_time);

        //Recieved an urgent Frame Number, handle it
        //using partial results
        for (pendingRequestIterator i =
                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            LOGD("Iterator Frame = %d urgent frame = %d",
                 i->frame_number, urgent_frame_number);

            // An older non-input request that never got a partial result means
            // the HAL skipped its urgent metadata — log it, nothing to recover.
            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
                (i->partial_result_cnt == 0)) {
                LOGE("Error: HAL missed urgent metadata for frame number %d",
                         i->frame_number);
            }

            if (i->frame_number == urgent_frame_number &&
                     i->bUrgentReceived == 0) {

                camera3_capture_result_t result;
                memset(&result, 0, sizeof(camera3_capture_result_t));

                i->partial_result_cnt++;
                i->bUrgentReceived = 1;
                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                result.partial_result = i->partial_result_cnt;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("urgent frame_number = %u, capture_time = %lld",
                      result.frame_number, capture_time);
                // The translated metadata was allocated for this callback only.
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    // A metadata buffer without a valid frame number only marks start-of-frame;
    // release it (if requested) and still bump pipeline depths below.
    if (!frame_number_valid) {
        LOGD("Not a valid normal frame number, used as SOF only");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    LOGH("valid frame_number = %u, capture_time = %lld",
            frame_number, capture_time);

    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        // Flush out all entries with less or equal frame numbers.

        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));

        LOGD("frame_number in the list is %u", i->frame_number);
        i->partial_result_cnt++;
        result.partial_result = i->partial_result_cnt;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then send the ERROR_BUFFER for the corresponding stream
        // The API does not expect a blob buffer to be dropped
        if (p_cam_frame_drop) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
                    if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                        LOGE("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
                                __func__, i->frame_number, streamID, j->stream->format);
                        notify_msg.type = CAMERA3_MSG_ERROR;
                        notify_msg.message.error.frame_number = i->frame_number;
                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                        notify_msg.message.error.error_stream = j->stream;
                        mCallbackOps->notify(mCallbackOps, &notify_msg);
                        LOGE("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
                                __func__, i->frame_number, streamID, j->stream->format);
                        // Remember this drop so the buffer itself can be
                        // marked STATUS_ERROR when it is returned later.
                        PendingFrameDropInfo PendingFrameDrop;
                        PendingFrameDrop.frame_number=i->frame_number;
                        PendingFrameDrop.stream_ID = streamID;
                        // Add the Frame drop info to mPendingFrameDropList
                        mPendingFrameDropList.push_back(PendingFrameDrop);
                   }
               }
            }
        }

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        /* we could hit this case when we either
         * 1. have a pending reprocess request or
         * 2. miss a metadata buffer callback */
        if (i->frame_number < frame_number) {
            if (i->input_buffer) {
                /* this will be handled in handleInputBufferWithLock */
                i++;
                continue;
            } else {

                mPendingLiveRequest--;

                // Missed metadata: send a minimal (request-id only) result plus
                // an ERROR_RESULT notification for this frame.
                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
                result.result = dummyMetadata.release();

                notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
            }
        } else {
            mPendingLiveRequest--;
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            i->timestamp = capture_time;

            /* Set the timestamp in display metadata so that clients aware of
               private_handle such as VT can use this un-modified timestamps.
               Camera framework is unaware of this timestamp and cannot change this */
            updateTimeStampInPendingBuffers(i->frame_number, i->timestamp);

            // Find channel requiring metadata, meaning internal offline postprocess
            // is needed.
            //TODO: for now, we don't support two streams requiring metadata at the same time.
            // (because we are not making copies, and metadata buffer is not reference counted.
            bool internalPproc = false;
            for (pendingBufferIterator iter = i->buffers.begin();
                    iter != i->buffers.end(); iter++) {
                if (iter->need_metadata) {
                    internalPproc = true;
                    QCamera3ProcessingChannel *channel =
                            (QCamera3ProcessingChannel *)iter->stream->priv;
                    // Hand the metadata buffer to the reprocess channel; it now
                    // owns the buffer's release (see !internalPproc case below).
                    channel->queueReprocMetadata(metadata_buf);
                    break;
                }
            }

            // atrace_begin(ATRACE_TAG_ALWAYS, "translateFromHalMetadata");
            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
                    i->capture_intent, i->hybrid_ae_enable,
                     /* DevCamDebug metadata translateFromHalMetadata function call*/
                    i->DevCamDebug_meta_enable,
                    /* DevCamDebug metadata end */
                    internalPproc, i->fwkCacMode,
                    firstMetadataInBatch);
            // atrace_end(ATRACE_TAG_ALWAYS);

            saveExifParams(metadata);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }
            }

            if (!internalPproc) {
                LOGD("couldn't find need_metadata for this metadata");
                // Return metadata buffer
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            }
        }
        if (!result.result) {
            LOGE("metadata is NULL");
        }
        result.frame_number = i->frame_number;
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count buffers already cached for this request (filled earlier by
        // handleBufferWithLock) so they can be delivered with this result.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        updateFpsInPreviewBuffer(metadata, i->frame_number);

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            if (result_buffers != NULL) {
                size_t result_buffers_idx = 0;
                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                        j != i->buffers.end(); j++) {
                    if (j->buffer) {
                        // Mark buffers that belong to a previously reported
                        // frame drop as STATUS_ERROR before returning them.
                        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                                m != mPendingFrameDropList.end(); m++) {
                            QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                            if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
                                j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                                LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
                                        frame_number, streamID);
                                m = mPendingFrameDropList.erase(m);
                                break;
                            }
                        }
                        j->buffer->status |= mPendingBuffersMap.getBufErrStatus(j->buffer->buffer);
                        mPendingBuffersMap.removeBuf(j->buffer->buffer);
                        result_buffers[result_buffers_idx++] = *(j->buffer);
                        free(j->buffer);
                        j->buffer = NULL;
                    }
                }

                result.output_buffers = result_buffers;
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("meta frame_number = %u, capture_time = %lld",
                        result.frame_number, i->timestamp);
                free_camera_metadata((camera_metadata_t *)result.result);
                delete[] result_buffers;
            }else {
                LOGE("Fatal error: out of memory");
            }
        } else {
            // Metadata-only result (no buffers cached for this request yet).
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            LOGD("meta frame_number = %u, capture_time = %lld",
                    result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }

        i = erasePendingRequest(i);

        // NOTE(review): cached reprocess results are flushed for frames after
        // this one — see handlePendingReprocResults for the exact semantics.
        if (!mPendingReprocessResultList.empty()) {
            handlePendingReprocResults(frame_number + 1);
        }
    }

done_metadata:
    // Every metadata callback counts as one pipeline stage for all requests
    // still pending, regardless of whether this buffer was valid.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
    // A request slot may have freed up; wake any blocked process_capture_request.
    unblockRequestIfNecessary();
}
3212 
3213 /*===========================================================================
3214  * FUNCTION   : hdrPlusPerfLock
3215  *
3216  * DESCRIPTION: perf lock for HDR+ using custom intent
3217  *
3218  * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3219  *
3220  * RETURN     : None
3221  *
3222  *==========================================================================*/
hdrPlusPerfLock(mm_camera_super_buf_t * metadata_buf)3223 void QCamera3HardwareInterface::hdrPlusPerfLock(
3224         mm_camera_super_buf_t *metadata_buf)
3225 {
3226     if (NULL == metadata_buf) {
3227         LOGE("metadata_buf is NULL");
3228         return;
3229     }
3230     metadata_buffer_t *metadata =
3231             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3232     int32_t *p_frame_number_valid =
3233             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3234     uint32_t *p_frame_number =
3235             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3236 
3237     if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3238         LOGE("%s: Invalid metadata", __func__);
3239         return;
3240     }
3241 
3242     //acquire perf lock for 5 sec after the last HDR frame is captured
3243     if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3244         if ((p_frame_number != NULL) &&
3245                 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
3246             m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
3247         }
3248     }
3249 
3250     //release lock after perf lock timer is expired. If lock is already released,
3251     //isTimerReset returns false
3252     if (m_perfLock.isTimerReset()) {
3253         mLastCustIntentFrmNum = -1;
3254         m_perfLock.lock_rel_timed();
3255     }
3256 }
3257 
3258 /*===========================================================================
3259  * FUNCTION   : handleInputBufferWithLock
3260  *
3261  * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3262  *
3263  * PARAMETERS : @frame_number: frame number of the input buffer
3264  *
3265  * RETURN     :
3266  *
3267  *==========================================================================*/
handleInputBufferWithLock(uint32_t frame_number)3268 void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3269 {
3270     ATRACE_CALL();
3271     pendingRequestIterator i = mPendingRequestsList.begin();
3272     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3273         i++;
3274     }
3275     if (i != mPendingRequestsList.end() && i->input_buffer) {
3276         //found the right request
3277         if (!i->shutter_notified) {
3278             CameraMetadata settings;
3279             camera3_notify_msg_t notify_msg;
3280             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3281             nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3282             if(i->settings) {
3283                 settings = i->settings;
3284                 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3285                     capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3286                 } else {
3287                     LOGE("No timestamp in input settings! Using current one.");
3288                 }
3289             } else {
3290                 LOGE("Input settings missing!");
3291             }
3292 
3293             notify_msg.type = CAMERA3_MSG_SHUTTER;
3294             notify_msg.message.shutter.frame_number = frame_number;
3295             notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3296             mCallbackOps->notify(mCallbackOps, &notify_msg);
3297             i->shutter_notified = true;
3298             LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3299                         i->frame_number, notify_msg.message.shutter.timestamp);
3300         }
3301 
3302         if (i->input_buffer->release_fence != -1) {
3303            int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3304            close(i->input_buffer->release_fence);
3305            if (rc != OK) {
3306                LOGE("input buffer sync wait failed %d", rc);
3307            }
3308         }
3309 
3310         camera3_capture_result result;
3311         memset(&result, 0, sizeof(camera3_capture_result));
3312         result.frame_number = frame_number;
3313         result.result = i->settings;
3314         result.input_buffer = i->input_buffer;
3315         result.partial_result = PARTIAL_RESULT_COUNT;
3316 
3317         mCallbackOps->process_capture_result(mCallbackOps, &result);
3318         LOGD("Input request metadata and input buffer frame_number = %u",
3319                         i->frame_number);
3320         i = erasePendingRequest(i);
3321     } else {
3322         LOGE("Could not find input request for frame number %d", frame_number);
3323     }
3324 }
3325 
3326 /*===========================================================================
3327  * FUNCTION   : handleBufferWithLock
3328  *
3329  * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3330  *
3331  * PARAMETERS : @buffer: image buffer for the callback
3332  *              @frame_number: frame number of the image buffer
3333  *
3334  * RETURN     :
3335  *
3336  *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CALL();
    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Request already completed (metadata sent earlier): deliver this
        // buffer on its own as a zero-partial result.
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                LOGW("Error: pending live frame number %d is smaller than %d",
                         j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this frame/stream was reported dropped earlier, flag the buffer
        // STATUS_ERROR and retire the drop record.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                         frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        // Merge in any error status recorded for this buffer handle.
        buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
        result.output_buffers = buffer;
        LOGH("result frame_number = %d, buffer = %p",
                 frame_number, buffer->buffer);

        mPendingBuffersMap.removeBuf(buffer->buffer);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer) {
            // Reprocess request path: build shutter + full result here.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            // Use the sensor timestamp from the input settings when present,
            // otherwise fall back to the current monotonic time.
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGW("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            // Wait on and close the input buffer's release fence before use.
            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
                   LOGE("input buffer sync wait failed %d", rc);
               }
            }
            buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
            mPendingBuffersMap.removeBuf(buffer->buffer);

            // Results must be returned in frame-number order: only notify now
            // if no older request is still pending.
            bool notifyNow = true;
            for (pendingRequestIterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    notifyNow = false;
                    break;
                }
            }

            if (notifyNow) {
                camera3_capture_result result;
                memset(&result, 0, sizeof(camera3_capture_result));
                result.frame_number = frame_number;
                result.result = i->settings;
                result.input_buffer = i->input_buffer;
                result.num_output_buffers = 1;
                result.output_buffers = buffer;
                result.partial_result = PARTIAL_RESULT_COUNT;

                mCallbackOps->notify(mCallbackOps, &notify_msg);
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("Notify reprocess now %d!", frame_number);
                i = erasePendingRequest(i);
            } else {
                // Cache reprocess result for later
                // (flushed by handlePendingReprocResults once older frames
                // have been delivered).
                PendingReprocessResult pendingResult;
                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
                pendingResult.notify_msg = notify_msg;
                pendingResult.buffer = *buffer;
                pendingResult.frame_number = frame_number;
                mPendingReprocessResultList.push_back(pendingResult);
                LOGD("Cache reprocess result %d!", frame_number);
            }
        } else {
            // Normal request still pending metadata: cache a copy of the
            // buffer on the request; handleMetadataWithLock returns it with
            // the metadata result (and frees this malloc'd copy).
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        LOGE("Error: buffer is already set");
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        LOGH("cache buffer %p at result frame_number %u",
                             buffer->buffer, frame_number);
                    }
                }
            }
        }
    }
}
3474 
3475 /*===========================================================================
3476  * FUNCTION   : unblockRequestIfNecessary
3477  *
3478  * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3479  *              that mMutex is held when this function is called.
3480  *
3481  * PARAMETERS :
3482  *
3483  * RETURN     :
3484  *
3485  *==========================================================================*/
unblockRequestIfNecessary()3486 void QCamera3HardwareInterface::unblockRequestIfNecessary()
3487 {
3488    // Unblock process_capture_request
3489    pthread_cond_signal(&mRequestCond);
3490 }
3491 
3492 
3493 /*===========================================================================
3494  * FUNCTION   : processCaptureRequest
3495  *
3496  * DESCRIPTION: process a capture request from camera service
3497  *
3498  * PARAMETERS :
3499  *   @request : request from framework to process
3500  *
3501  * RETURN     :
3502  *
3503  *==========================================================================*/
processCaptureRequest(camera3_capture_request_t * request)3504 int QCamera3HardwareInterface::processCaptureRequest(
3505                     camera3_capture_request_t *request)
3506 {
3507     ATRACE_CALL();
3508     int rc = NO_ERROR;
3509     int32_t request_id;
3510     CameraMetadata meta;
3511     bool isVidBufRequested = false;
3512     camera3_stream_buffer_t *pInputBuffer = NULL;
3513 
3514     pthread_mutex_lock(&mMutex);
3515 
3516     // Validate current state
3517     switch (mState) {
3518         case CONFIGURED:
3519         case STARTED:
3520             /* valid state */
3521             break;
3522 
3523         case ERROR:
3524             pthread_mutex_unlock(&mMutex);
3525             handleCameraDeviceError();
3526             return -ENODEV;
3527 
3528         default:
3529             LOGE("Invalid state %d", mState);
3530             pthread_mutex_unlock(&mMutex);
3531             return -ENODEV;
3532     }
3533 
3534     rc = validateCaptureRequest(request);
3535     if (rc != NO_ERROR) {
3536         LOGE("incoming request is not valid");
3537         pthread_mutex_unlock(&mMutex);
3538         return rc;
3539     }
3540 
3541     meta = request->settings;
3542 
3543     // For first capture request, send capture intent, and
3544     // stream on all streams
3545     if (mState == CONFIGURED) {
3546         // send an unconfigure to the backend so that the isp
3547         // resources are deallocated
3548         if (!mFirstConfiguration) {
3549             cam_stream_size_info_t stream_config_info;
3550             int32_t hal_version = CAM_HAL_V3;
3551             memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3552             stream_config_info.buffer_info.min_buffers =
3553                     MIN_INFLIGHT_REQUESTS;
3554             stream_config_info.buffer_info.max_buffers =
3555                     m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3556             clear_metadata_buffer(mParameters);
3557             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3558                     CAM_INTF_PARM_HAL_VERSION, hal_version);
3559             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3560                     CAM_INTF_META_STREAM_INFO, stream_config_info);
3561             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3562                     mParameters);
3563             if (rc < 0) {
3564                 LOGE("set_parms for unconfigure failed");
3565                 pthread_mutex_unlock(&mMutex);
3566                 return rc;
3567             }
3568         }
3569         m_perfLock.lock_acq();
3570         /* get eis information for stream configuration */
3571         cam_is_type_t is_type;
3572         char is_type_value[PROPERTY_VALUE_MAX];
3573         property_get("persist.camera.is_type", is_type_value, "0");
3574         is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3575 
3576         if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3577             int32_t hal_version = CAM_HAL_V3;
3578             uint8_t captureIntent =
3579                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3580             mCaptureIntent = captureIntent;
3581             clear_metadata_buffer(mParameters);
3582             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3583             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3584         }
3585 
3586         //If EIS is enabled, turn it on for video
3587         bool setEis = m_bEisEnable && m_bEisSupportedSize;
3588         int32_t vsMode;
3589         vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3590         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3591             rc = BAD_VALUE;
3592         }
3593 
3594         //IS type will be 0 unless EIS is supported. If EIS is supported
3595         //it could either be 1 or 4 depending on the stream and video size
3596         if (setEis) {
3597             if (!m_bEisSupportedSize) {
3598                 is_type = IS_TYPE_DIS;
3599             } else {
3600                 is_type = IS_TYPE_EIS_2_0;
3601             }
3602             mStreamConfigInfo.is_type = is_type;
3603         } else {
3604             mStreamConfigInfo.is_type = IS_TYPE_NONE;
3605         }
3606 
3607         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3608                 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3609         int32_t tintless_value = 1;
3610         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3611                 CAM_INTF_PARM_TINTLESS, tintless_value);
3612         //Disable CDS for HFR mode or if DIS/EIS is on.
3613         //CDS is a session parameter in the backend/ISP, so need to be set/reset
3614         //after every configure_stream
3615         if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3616                 (m_bIsVideo)) {
3617             int32_t cds = CAM_CDS_MODE_OFF;
3618             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3619                     CAM_INTF_PARM_CDS_MODE, cds))
3620                 LOGE("Failed to disable CDS for HFR mode");
3621 
3622         }
3623 
3624         if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
3625             uint8_t* use_av_timer = NULL;
3626 
3627             if (m_debug_avtimer){
3628                 use_av_timer = &m_debug_avtimer;
3629             }
3630             else{
3631                 use_av_timer =
3632                     meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
3633             }
3634 
3635             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
3636                 rc = BAD_VALUE;
3637             }
3638         }
3639 
3640         setMobicat();
3641 
3642         /* Set fps and hfr mode while sending meta stream info so that sensor
3643          * can configure appropriate streaming mode */
3644         mHFRVideoFps = DEFAULT_VIDEO_FPS;
3645         mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
3646         mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3647         if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3648             rc = setHalFpsRange(meta, mParameters);
3649             if (rc == NO_ERROR) {
3650                 int32_t max_fps =
3651                     (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3652                 if (mBatchSize) {
3653                     /* For HFR, more buffers are dequeued upfront to improve the performance */
3654                     mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3655                     mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3656                 } else if (max_fps == 60) {
3657                     /* for 60 fps use cases increase inflight requests */
3658                     mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
3659                     mMaxInFlightRequests = MAX_INFLIGHT_60FPS_REQUESTS;
3660                 } else if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
3661                     /* for non 60 fps video use cases, set min = max inflight requests to
3662                     avoid frame drops due to degraded system performance */
3663                     mMinInFlightRequests = MAX_INFLIGHT_REQUESTS;
3664                 }
3665             }
3666             else {
3667                 LOGE("setHalFpsRange failed");
3668             }
3669         }
3670         if (meta.exists(ANDROID_CONTROL_MODE)) {
3671             uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3672             rc = extractSceneMode(meta, metaMode, mParameters);
3673             if (rc != NO_ERROR) {
3674                 LOGE("extractSceneMode failed");
3675             }
3676         }
3677         memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
3678 
3679 
3680         //TODO: validate the arguments, HSV scenemode should have only the
3681         //advertised fps ranges
3682 
3683         /*set the capture intent, hal version, tintless, stream info,
3684          *and DIS enable parameters to the backend*/
3685         LOGD("set_parms META_STREAM_INFO " );
3686         for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3687             LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
3688                     "Format:%d",
3689                     mStreamConfigInfo.type[i],
3690                     mStreamConfigInfo.stream_sizes[i].width,
3691                     mStreamConfigInfo.stream_sizes[i].height,
3692                     mStreamConfigInfo.postprocess_mask[i],
3693                     mStreamConfigInfo.format[i]);
3694         }
3695 
3696         rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3697                     mParameters);
3698         if (rc < 0) {
3699             LOGE("set_parms failed for hal version, stream info");
3700         }
3701 
3702         cam_dimension_t sensor_dim;
3703         memset(&sensor_dim, 0, sizeof(sensor_dim));
3704         rc = getSensorOutputSize(sensor_dim);
3705         if (rc != NO_ERROR) {
3706             LOGE("Failed to get sensor output size");
3707             pthread_mutex_unlock(&mMutex);
3708             goto error_exit;
3709         }
3710 
3711         mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3712                 gCamCapability[mCameraId]->active_array_size.height,
3713                 sensor_dim.width, sensor_dim.height);
3714 
3715         /* Set batchmode before initializing channel. Since registerBuffer
3716          * internally initializes some of the channels, better set batchmode
3717          * even before first register buffer */
3718         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3719             it != mStreamInfo.end(); it++) {
3720             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3721             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3722                     && mBatchSize) {
3723                 rc = channel->setBatchSize(mBatchSize);
3724                 //Disable per frame map unmap for HFR/batchmode case
3725                 rc |= channel->setPerFrameMapUnmap(false);
3726                 if (NO_ERROR != rc) {
3727                     LOGE("Channel init failed %d", rc);
3728                     pthread_mutex_unlock(&mMutex);
3729                     goto error_exit;
3730                 }
3731             }
3732         }
3733 
3734         //First initialize all streams
3735         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3736             it != mStreamInfo.end(); it++) {
3737             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3738             if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3739                ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3740                setEis)
3741                 rc = channel->initialize(is_type);
3742             else {
3743                 rc = channel->initialize(IS_TYPE_NONE);
3744             }
3745             if (NO_ERROR != rc) {
3746                 LOGE("Channel initialization failed %d", rc);
3747                 pthread_mutex_unlock(&mMutex);
3748                 goto error_exit;
3749             }
3750         }
3751 
3752         if (mRawDumpChannel) {
3753             rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3754             if (rc != NO_ERROR) {
3755                 LOGE("Error: Raw Dump Channel init failed");
3756                 pthread_mutex_unlock(&mMutex);
3757                 goto error_exit;
3758             }
3759         }
3760         if (mSupportChannel) {
3761             rc = mSupportChannel->initialize(IS_TYPE_NONE);
3762             if (rc < 0) {
3763                 LOGE("Support channel initialization failed");
3764                 pthread_mutex_unlock(&mMutex);
3765                 goto error_exit;
3766             }
3767         }
3768         if (mAnalysisChannel) {
3769             rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3770             if (rc < 0) {
3771                 LOGE("Analysis channel initialization failed");
3772                 pthread_mutex_unlock(&mMutex);
3773                 goto error_exit;
3774             }
3775         }
3776         if (mDummyBatchChannel) {
3777             rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3778             if (rc < 0) {
3779                 LOGE("mDummyBatchChannel setBatchSize failed");
3780                 pthread_mutex_unlock(&mMutex);
3781                 goto error_exit;
3782             }
3783             rc = mDummyBatchChannel->initialize(is_type);
3784             if (rc < 0) {
3785                 LOGE("mDummyBatchChannel initialization failed");
3786                 pthread_mutex_unlock(&mMutex);
3787                 goto error_exit;
3788             }
3789         }
3790 
3791         // Set bundle info
3792         rc = setBundleInfo();
3793         if (rc < 0) {
3794             LOGE("setBundleInfo failed %d", rc);
3795             pthread_mutex_unlock(&mMutex);
3796             goto error_exit;
3797         }
3798 
3799         //update settings from app here
3800         if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3801             mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
3802             LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
3803         }
3804         if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
3805             mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
3806             LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
3807         }
3808         if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
3809             mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
3810             LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
3811 
3812             if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
3813                 (mLinkedCameraId != mCameraId) ) {
3814                 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
3815                     mLinkedCameraId, mCameraId);
3816                 goto error_exit;
3817             }
3818         }
3819 
3820         // add bundle related cameras
3821         LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
3822         if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3823             if (mIsDeviceLinked)
3824                 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
3825             else
3826                 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
3827 
3828             pthread_mutex_lock(&gCamLock);
3829 
3830             if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
3831                 LOGE("Dualcam: Invalid Session Id ");
3832                 pthread_mutex_unlock(&gCamLock);
3833                 goto error_exit;
3834             }
3835 
3836             if (mIsMainCamera == 1) {
3837                 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
3838                 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
3839                 // related session id should be session id of linked session
3840                 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3841             } else {
3842                 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
3843                 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
3844                 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3845             }
3846             pthread_mutex_unlock(&gCamLock);
3847 
3848             rc = mCameraHandle->ops->sync_related_sensors(
3849                     mCameraHandle->camera_handle, m_pRelCamSyncBuf);
3850             if (rc < 0) {
3851                 LOGE("Dualcam: link failed");
3852                 goto error_exit;
3853             }
3854         }
3855 
3856         //Then start them.
3857         LOGH("Start META Channel");
3858         rc = mMetadataChannel->start();
3859         if (rc < 0) {
3860             LOGE("META channel start failed");
3861             pthread_mutex_unlock(&mMutex);
3862             goto error_exit;
3863         }
3864 
3865         if (mAnalysisChannel) {
3866             rc = mAnalysisChannel->start();
3867             if (rc < 0) {
3868                 LOGE("Analysis channel start failed");
3869                 mMetadataChannel->stop();
3870                 pthread_mutex_unlock(&mMutex);
3871                 goto error_exit;
3872             }
3873         }
3874 
3875         if (mSupportChannel) {
3876             rc = mSupportChannel->start();
3877             if (rc < 0) {
3878                 LOGE("Support channel start failed");
3879                 mMetadataChannel->stop();
3880                 /* Although support and analysis are mutually exclusive today
3881                    adding it in anycase for future proofing */
3882                 if (mAnalysisChannel) {
3883                     mAnalysisChannel->stop();
3884                 }
3885                 pthread_mutex_unlock(&mMutex);
3886                 goto error_exit;
3887             }
3888         }
3889         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3890             it != mStreamInfo.end(); it++) {
3891             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3892             LOGH("Start Processing Channel mask=%d",
3893                      channel->getStreamTypeMask());
3894             rc = channel->start();
3895             if (rc < 0) {
3896                 LOGE("channel start failed");
3897                 pthread_mutex_unlock(&mMutex);
3898                 goto error_exit;
3899             }
3900         }
3901 
3902         if (mRawDumpChannel) {
3903             LOGD("Starting raw dump stream");
3904             rc = mRawDumpChannel->start();
3905             if (rc != NO_ERROR) {
3906                 LOGE("Error Starting Raw Dump Channel");
3907                 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3908                       it != mStreamInfo.end(); it++) {
3909                     QCamera3Channel *channel =
3910                         (QCamera3Channel *)(*it)->stream->priv;
3911                     LOGH("Stopping Processing Channel mask=%d",
3912                         channel->getStreamTypeMask());
3913                     channel->stop();
3914                 }
3915                 if (mSupportChannel)
3916                     mSupportChannel->stop();
3917                 if (mAnalysisChannel) {
3918                     mAnalysisChannel->stop();
3919                 }
3920                 mMetadataChannel->stop();
3921                 pthread_mutex_unlock(&mMutex);
3922                 goto error_exit;
3923             }
3924         }
3925 
3926         if (mChannelHandle) {
3927 
3928             rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3929                     mChannelHandle);
3930             if (rc != NO_ERROR) {
3931                 LOGE("start_channel failed %d", rc);
3932                 pthread_mutex_unlock(&mMutex);
3933                 goto error_exit;
3934             }
3935         }
3936 
3937         goto no_error;
3938 error_exit:
3939         m_perfLock.lock_rel();
3940         return rc;
3941 no_error:
3942         m_perfLock.lock_rel();
3943 
3944         mWokenUpByDaemon = false;
3945         mPendingLiveRequest = 0;
3946         mFirstConfiguration = false;
3947         enablePowerHint();
3948     }
3949 
3950     uint32_t frameNumber = request->frame_number;
3951     cam_stream_ID_t streamsArray;
3952 
3953     if (mFlushPerf) {
3954         //we cannot accept any requests during flush
3955         LOGE("process_capture_request cannot proceed during flush");
3956         pthread_mutex_unlock(&mMutex);
3957         return NO_ERROR; //should return an error
3958     }
3959 
3960     if (meta.exists(ANDROID_REQUEST_ID)) {
3961         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3962         mCurrentRequestId = request_id;
3963         LOGD("Received request with id: %d", request_id);
3964     } else if (mState == CONFIGURED || mCurrentRequestId == -1){
3965         LOGE("Unable to find request id field, \
3966                 & no previous id available");
3967         pthread_mutex_unlock(&mMutex);
3968         return NAME_NOT_FOUND;
3969     } else {
3970         LOGD("Re-using old request id");
3971         request_id = mCurrentRequestId;
3972     }
3973 
3974     LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
3975                                     request->num_output_buffers,
3976                                     request->input_buffer,
3977                                     frameNumber);
3978     // Acquire all request buffers first
3979     streamsArray.num_streams = 0;
3980     int blob_request = 0;
3981     uint32_t snapshotStreamId = 0;
3982     for (size_t i = 0; i < request->num_output_buffers; i++) {
3983         const camera3_stream_buffer_t& output = request->output_buffers[i];
3984         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3985 
3986         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3987             //Call function to store local copy of jpeg data for encode params.
3988             blob_request = 1;
3989             snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3990         }
3991 
3992         if (output.acquire_fence != -1) {
3993            rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3994            close(output.acquire_fence);
3995            if (rc != OK) {
3996               LOGE("sync wait failed %d", rc);
3997               pthread_mutex_unlock(&mMutex);
3998               return rc;
3999            }
4000         }
4001 
4002         streamsArray.stream_request[streamsArray.num_streams++].streamID =
4003             channel->getStreamID(channel->getStreamTypeMask());
4004 
4005         if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4006             isVidBufRequested = true;
4007         }
4008     }
4009 
4010     if (blob_request) {
4011         KPI_ATRACE_INT("SNAPSHOT", 1);
4012     }
4013     if (blob_request && mRawDumpChannel) {
4014         LOGD("Trigger Raw based on blob request if Raw dump is enabled");
4015         streamsArray.stream_request[streamsArray.num_streams].streamID =
4016             mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
4017         streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
4018     }
4019 
4020     if(request->input_buffer == NULL) {
4021         /* Parse the settings:
4022          * - For every request in NORMAL MODE
4023          * - For every request in HFR mode during preview only case
4024          * - For first request of every batch in HFR mode during video
4025          * recording. In batchmode the same settings except frame number is
4026          * repeated in each request of the batch.
4027          */
4028         if (!mBatchSize ||
4029            (mBatchSize && !isVidBufRequested) ||
4030            (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
4031             rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
4032             if (rc < 0) {
4033                 LOGE("fail to set frame parameters");
4034                 pthread_mutex_unlock(&mMutex);
4035                 return rc;
4036             }
4037         }
4038         /* For batchMode HFR, setFrameParameters is not called for every
4039          * request. But only frame number of the latest request is parsed.
4040          * Keep track of first and last frame numbers in a batch so that
4041          * metadata for the frame numbers of batch can be duplicated in
4042          * handleBatchMetadata */
4043         if (mBatchSize) {
4044             if (!mToBeQueuedVidBufs) {
4045                 //start of the batch
4046                 mFirstFrameNumberInBatch = request->frame_number;
4047             }
4048             if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4049                 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4050                 LOGE("Failed to set the frame number in the parameters");
4051                 return BAD_VALUE;
4052             }
4053         }
4054         if (mNeedSensorRestart) {
4055             /* Unlock the mutex as restartSensor waits on the channels to be
4056              * stopped, which in turn calls stream callback functions -
4057              * handleBufferWithLock and handleMetadataWithLock */
4058             pthread_mutex_unlock(&mMutex);
4059             rc = dynamicUpdateMetaStreamInfo();
4060             if (rc != NO_ERROR) {
4061                 LOGE("Restarting the sensor failed");
4062                 return BAD_VALUE;
4063             }
4064             mNeedSensorRestart = false;
4065             pthread_mutex_lock(&mMutex);
4066         }
4067     } else {
4068 
4069         if (request->input_buffer->acquire_fence != -1) {
4070            rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
4071            close(request->input_buffer->acquire_fence);
4072            if (rc != OK) {
4073               LOGE("input buffer sync wait failed %d", rc);
4074               pthread_mutex_unlock(&mMutex);
4075               return rc;
4076            }
4077         }
4078     }
4079 
4080     if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
4081         mLastCustIntentFrmNum = frameNumber;
4082     }
4083     /* Update pending request list and pending buffers map */
4084     PendingRequestInfo pendingRequest;
4085     pendingRequestIterator latestRequest;
4086     pendingRequest.frame_number = frameNumber;
4087     pendingRequest.num_buffers = request->num_output_buffers;
4088     pendingRequest.request_id = request_id;
4089     pendingRequest.blob_request = blob_request;
4090     pendingRequest.timestamp = 0;
4091     pendingRequest.bUrgentReceived = 0;
4092     if (request->input_buffer) {
4093         pendingRequest.input_buffer =
4094                 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
4095         *(pendingRequest.input_buffer) = *(request->input_buffer);
4096         pInputBuffer = pendingRequest.input_buffer;
4097     } else {
4098        pendingRequest.input_buffer = NULL;
4099        pInputBuffer = NULL;
4100     }
4101 
4102     pendingRequest.pipeline_depth = 0;
4103     pendingRequest.partial_result_cnt = 0;
4104     extractJpegMetadata(mCurJpegMeta, request);
4105     pendingRequest.jpegMetadata = mCurJpegMeta;
4106     pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
4107     pendingRequest.shutter_notified = false;
4108 
4109     //extract capture intent
4110     if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4111         mCaptureIntent =
4112                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4113     }
4114     if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
4115         mHybridAeEnable =
4116                 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
4117     }
4118     pendingRequest.capture_intent = mCaptureIntent;
4119     pendingRequest.hybrid_ae_enable = mHybridAeEnable;
4120     /* DevCamDebug metadata processCaptureRequest */
4121     if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
4122         mDevCamDebugMetaEnable =
4123                 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
4124     }
4125     pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
4126     /* DevCamDebug metadata end */
4127 
4128     //extract CAC info
4129     if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4130         mCacMode =
4131                 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4132     }
4133     pendingRequest.fwkCacMode = mCacMode;
4134 
4135     PendingBuffersInRequest bufsForCurRequest;
4136     bufsForCurRequest.frame_number = frameNumber;
4137     // Mark current timestamp for the new request
4138     bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
4139 
4140     for (size_t i = 0; i < request->num_output_buffers; i++) {
4141         RequestedBufferInfo requestedBuf;
4142         memset(&requestedBuf, 0, sizeof(requestedBuf));
4143         requestedBuf.stream = request->output_buffers[i].stream;
4144         requestedBuf.buffer = NULL;
4145         pendingRequest.buffers.push_back(requestedBuf);
4146 
4147         // Add to buffer handle the pending buffers list
4148         PendingBufferInfo bufferInfo;
4149         bufferInfo.buffer = request->output_buffers[i].buffer;
4150         bufferInfo.stream = request->output_buffers[i].stream;
4151         bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4152         QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4153         LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4154             frameNumber, bufferInfo.buffer,
4155             channel->getStreamTypeMask(), bufferInfo.stream->format);
4156     }
4157     // Add this request packet into mPendingBuffersMap
4158     mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4159     LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4160         mPendingBuffersMap.get_num_overall_buffers());
4161 
4162     latestRequest = mPendingRequestsList.insert(
4163             mPendingRequestsList.end(), pendingRequest);
4164     if(mFlush) {
4165         LOGI("mFlush is true");
4166         pthread_mutex_unlock(&mMutex);
4167         return NO_ERROR;
4168     }
4169 
4170     int indexUsed;
4171     // Notify metadata channel we receive a request
4172     mMetadataChannel->request(NULL, frameNumber, indexUsed);
4173 
4174     if(request->input_buffer != NULL){
4175         LOGD("Input request, frame_number %d", frameNumber);
4176         rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4177         if (NO_ERROR != rc) {
4178             LOGE("fail to set reproc parameters");
4179             pthread_mutex_unlock(&mMutex);
4180             return rc;
4181         }
4182     }
4183 
4184     // Call request on other streams
4185     uint32_t streams_need_metadata = 0;
4186     pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
4187     for (size_t i = 0; i < request->num_output_buffers; i++) {
4188         const camera3_stream_buffer_t& output = request->output_buffers[i];
4189         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4190 
4191         if (channel == NULL) {
4192             LOGW("invalid channel pointer for stream");
4193             continue;
4194         }
4195 
4196         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4197             LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
4198                       output.buffer, request->input_buffer, frameNumber);
4199             if(request->input_buffer != NULL){
4200                 rc = channel->request(output.buffer, frameNumber,
4201                         pInputBuffer, &mReprocMeta, indexUsed);
4202                 if (rc < 0) {
4203                     LOGE("Fail to request on picture channel");
4204                     pthread_mutex_unlock(&mMutex);
4205                     return rc;
4206                 }
4207             } else {
4208                 LOGD("snapshot request with buffer %p, frame_number %d",
4209                          output.buffer, frameNumber);
4210                 if (!request->settings) {
4211                     rc = channel->request(output.buffer, frameNumber,
4212                             NULL, mPrevParameters, indexUsed);
4213                 } else {
4214                     rc = channel->request(output.buffer, frameNumber,
4215                             NULL, mParameters, indexUsed);
4216                 }
4217                 if (rc < 0) {
4218                     LOGE("Fail to request on picture channel");
4219                     pthread_mutex_unlock(&mMutex);
4220                     return rc;
4221                 }
4222 
4223                 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4224                 uint32_t j = 0;
4225                 for (j = 0; j < streamsArray.num_streams; j++) {
4226                     if (streamsArray.stream_request[j].streamID == streamId) {
4227                       if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4228                           streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4229                       else
4230                           streamsArray.stream_request[j].buf_index = indexUsed;
4231                         break;
4232                     }
4233                 }
4234                 if (j == streamsArray.num_streams) {
4235                     LOGE("Did not find matching stream to update index");
4236                     assert(0);
4237                 }
4238 
4239                 pendingBufferIter->need_metadata = true;
4240                 streams_need_metadata++;
4241             }
4242         } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
4243             bool needMetadata = false;
4244             QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
4245             rc = yuvChannel->request(output.buffer, frameNumber,
4246                     pInputBuffer,
4247                     (pInputBuffer ? &mReprocMeta : mParameters), needMetadata, indexUsed);
4248             if (rc < 0) {
4249                 LOGE("Fail to request on YUV channel");
4250                 pthread_mutex_unlock(&mMutex);
4251                 return rc;
4252             }
4253 
4254             uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4255             uint32_t j = 0;
4256             for (j = 0; j < streamsArray.num_streams; j++) {
4257                 if (streamsArray.stream_request[j].streamID == streamId) {
4258                     if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4259                         streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4260                     else
4261                         streamsArray.stream_request[j].buf_index = indexUsed;
4262                     break;
4263                 }
4264             }
4265             if (j == streamsArray.num_streams) {
4266                 LOGE("Did not find matching stream to update index");
4267                 assert(0);
4268             }
4269 
4270             pendingBufferIter->need_metadata = needMetadata;
4271             if (needMetadata)
4272                 streams_need_metadata += 1;
4273             LOGD("calling YUV channel request, need_metadata is %d",
4274                      needMetadata);
4275         } else {
4276             LOGD("request with buffer %p, frame_number %d",
4277                   output.buffer, frameNumber);
4278 
4279             rc = channel->request(output.buffer, frameNumber, indexUsed);
4280 
4281             uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4282             uint32_t j = 0;
4283             for (j = 0; j < streamsArray.num_streams; j++) {
4284                 if (streamsArray.stream_request[j].streamID == streamId) {
4285                     if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4286                         streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4287                     else
4288                         streamsArray.stream_request[j].buf_index = indexUsed;
4289                     break;
4290                 }
4291             }
4292             if (j == streamsArray.num_streams) {
4293                 LOGE("Did not find matching stream to update index");
4294                 assert(0);
4295             }
4296 
4297             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4298                     && mBatchSize) {
4299                 mToBeQueuedVidBufs++;
4300                 if (mToBeQueuedVidBufs == mBatchSize) {
4301                     channel->queueBatchBuf();
4302                 }
4303             }
4304             if (rc < 0) {
4305                 LOGE("request failed");
4306                 pthread_mutex_unlock(&mMutex);
4307                 return rc;
4308             }
4309         }
4310         pendingBufferIter++;
4311     }
4312 
4313     //If 2 streams have need_metadata set to true, fail the request, unless
4314     //we copy/reference count the metadata buffer
4315     if (streams_need_metadata > 1) {
4316         LOGE("not supporting request in which two streams requires"
4317                 " 2 HAL metadata for reprocessing");
4318         pthread_mutex_unlock(&mMutex);
4319         return -EINVAL;
4320     }
4321 
4322     if (request->input_buffer == NULL) {
4323         /* Set the parameters to backend:
4324          * - For every request in NORMAL MODE
4325          * - For every request in HFR mode during preview only case
4326          * - Once every batch in HFR mode during video recording
4327          */
4328         if (!mBatchSize ||
4329            (mBatchSize && !isVidBufRequested) ||
4330            (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
4331             LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
4332                      mBatchSize, isVidBufRequested,
4333                     mToBeQueuedVidBufs);
4334 
4335             if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
4336                 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4337                     uint32_t m = 0;
4338                     for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4339                         if (streamsArray.stream_request[k].streamID ==
4340                                 mBatchedStreamsArray.stream_request[m].streamID)
4341                             break;
4342                         }
4343                         if (m == mBatchedStreamsArray.num_streams) {
4344                             mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
4345                                 streamsArray.stream_request[k].streamID;
4346                             mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
4347                                 streamsArray.stream_request[k].buf_index;
4348                             mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4349                         }
4350                 }
4351                 streamsArray = mBatchedStreamsArray;
4352             }
4353             /* Update stream id of all the requested buffers */
4354             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
4355                 LOGE("Failed to set stream type mask in the parameters");
4356                 return BAD_VALUE;
4357             }
4358 
4359             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4360                     mParameters);
4361             if (rc < 0) {
4362                 LOGE("set_parms failed");
4363             }
4364             /* reset to zero coz, the batch is queued */
4365             mToBeQueuedVidBufs = 0;
4366             mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
4367             memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
4368         } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
4369             for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4370                 uint32_t m = 0;
4371                 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4372                     if (streamsArray.stream_request[k].streamID ==
4373                             mBatchedStreamsArray.stream_request[m].streamID)
4374                         break;
4375                 }
4376                 if (m == mBatchedStreamsArray.num_streams) {
4377                     mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
4378                         streamsArray.stream_request[k].streamID;
4379                     mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
4380                         streamsArray.stream_request[k].buf_index;
4381                     mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4382                 }
4383             }
4384         }
4385         mPendingLiveRequest++;
4386     }
4387 
4388     LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
4389 
4390     mState = STARTED;
4391     // Added a timed condition wait
4392     struct timespec ts;
4393     uint8_t isValidTimeout = 1;
4394     rc = clock_gettime(CLOCK_MONOTONIC, &ts);
4395     if (rc < 0) {
4396       isValidTimeout = 0;
4397       LOGE("Error reading the real time clock!!");
4398     }
4399     else {
4400       // Make timeout as 5 sec for request to be honored
4401       ts.tv_sec += 5;
4402     }
4403     //Block on conditional variable
4404     while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
4405             (mState != ERROR) && (mState != DEINIT)) {
4406         if (!isValidTimeout) {
4407             LOGD("Blocking on conditional wait");
4408             pthread_cond_wait(&mRequestCond, &mMutex);
4409         }
4410         else {
4411             LOGD("Blocking on timed conditional wait");
4412             rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
4413             if (rc == ETIMEDOUT) {
4414                 rc = -ENODEV;
4415                 LOGE("Unblocked on timeout!!!!");
4416                 break;
4417             }
4418         }
4419         LOGD("Unblocked");
4420         if (mWokenUpByDaemon) {
4421             mWokenUpByDaemon = false;
4422             if (mPendingLiveRequest < mMaxInFlightRequests)
4423                 break;
4424         }
4425     }
4426     pthread_mutex_unlock(&mMutex);
4427 
4428     return rc;
4429 }
4430 
4431 /*===========================================================================
4432  * FUNCTION   : dump
4433  *
4434  * DESCRIPTION:
4435  *
4436  * PARAMETERS :
4437  *
4438  *
4439  * RETURN     :
4440  *==========================================================================*/
dump(int fd)4441 void QCamera3HardwareInterface::dump(int fd)
4442 {
4443     pthread_mutex_lock(&mMutex);
4444     dprintf(fd, "\n Camera HAL3 information Begin \n");
4445 
4446     dprintf(fd, "\nNumber of pending requests: %zu \n",
4447         mPendingRequestsList.size());
4448     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4449     dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
4450     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4451     for(pendingRequestIterator i = mPendingRequestsList.begin();
4452             i != mPendingRequestsList.end(); i++) {
4453         dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
4454         i->frame_number, i->num_buffers, i->request_id, i->blob_request,
4455         i->input_buffer);
4456     }
4457     dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
4458                 mPendingBuffersMap.get_num_overall_buffers());
4459     dprintf(fd, "-------+------------------\n");
4460     dprintf(fd, " Frame | Stream type mask \n");
4461     dprintf(fd, "-------+------------------\n");
4462     for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
4463         for(auto &j : req.mPendingBufferList) {
4464             QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
4465             dprintf(fd, " %5d | %11d \n",
4466                     req.frame_number, channel->getStreamTypeMask());
4467         }
4468     }
4469     dprintf(fd, "-------+------------------\n");
4470 
4471     dprintf(fd, "\nPending frame drop list: %zu\n",
4472         mPendingFrameDropList.size());
4473     dprintf(fd, "-------+-----------\n");
4474     dprintf(fd, " Frame | Stream ID \n");
4475     dprintf(fd, "-------+-----------\n");
4476     for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
4477         i != mPendingFrameDropList.end(); i++) {
4478         dprintf(fd, " %5d | %9d \n",
4479             i->frame_number, i->stream_ID);
4480     }
4481     dprintf(fd, "-------+-----------\n");
4482 
4483     dprintf(fd, "\n Camera HAL3 information End \n");
4484 
4485     /* use dumpsys media.camera as trigger to send update debug level event */
4486     mUpdateDebugLevel = true;
4487     pthread_mutex_unlock(&mMutex);
4488     return;
4489 }
4490 
4491 /*===========================================================================
4492  * FUNCTION   : flush
4493  *
4494  * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
4495  *              conditionally restarts channels
4496  *
4497  * PARAMETERS :
4498  *  @ restartChannels: re-start all channels
4499  *
4500  *
4501  * RETURN     :
4502  *          0 on success
4503  *          Error code on failure
4504  *==========================================================================*/
flush(bool restartChannels)4505 int QCamera3HardwareInterface::flush(bool restartChannels)
4506 {
4507     KPI_ATRACE_CALL();
4508     int32_t rc = NO_ERROR;
4509 
4510     LOGD("Unblocking Process Capture Request");
4511     pthread_mutex_lock(&mMutex);
4512     mFlush = true;
4513     pthread_mutex_unlock(&mMutex);
4514 
4515     rc = stopAllChannels();
4516     // unlink of dualcam
4517     if (mIsDeviceLinked) {
4518         m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4519         pthread_mutex_lock(&gCamLock);
4520 
4521         if (mIsMainCamera == 1) {
4522             m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4523             m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
4524             // related session id should be session id of linked session
4525             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4526         } else {
4527             m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4528             m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
4529             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4530         }
4531         pthread_mutex_unlock(&gCamLock);
4532 
4533         rc = mCameraHandle->ops->sync_related_sensors(
4534                 mCameraHandle->camera_handle, m_pRelCamSyncBuf);
4535         if (rc < 0) {
4536             LOGE("Dualcam: Unlink failed, but still proceed to close");
4537         }
4538     }
4539 
4540     if (rc < 0) {
4541         LOGE("stopAllChannels failed");
4542         return rc;
4543     }
4544     if (mChannelHandle) {
4545         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
4546                 mChannelHandle);
4547     }
4548 
4549     // Reset bundle info
4550     rc = setBundleInfo();
4551     if (rc < 0) {
4552         LOGE("setBundleInfo failed %d", rc);
4553         return rc;
4554     }
4555 
4556     // Mutex Lock
4557     pthread_mutex_lock(&mMutex);
4558 
4559     // Unblock process_capture_request
4560     mPendingLiveRequest = 0;
4561     pthread_cond_signal(&mRequestCond);
4562 
4563     rc = notifyErrorForPendingRequests();
4564     if (rc < 0) {
4565         LOGE("notifyErrorForPendingRequests failed");
4566         pthread_mutex_unlock(&mMutex);
4567         return rc;
4568     }
4569 
4570     mFlush = false;
4571 
4572     // Start the Streams/Channels
4573     if (restartChannels) {
4574         rc = startAllChannels();
4575         if (rc < 0) {
4576             LOGE("startAllChannels failed");
4577             pthread_mutex_unlock(&mMutex);
4578             return rc;
4579         }
4580     }
4581 
4582     if (mChannelHandle) {
4583         mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4584                     mChannelHandle);
4585         if (rc < 0) {
4586             LOGE("start_channel failed");
4587             pthread_mutex_unlock(&mMutex);
4588             return rc;
4589         }
4590     }
4591 
4592     pthread_mutex_unlock(&mMutex);
4593 
4594     return 0;
4595 }
4596 
4597 /*===========================================================================
4598  * FUNCTION   : flushPerf
4599  *
4600  * DESCRIPTION: This is the performance optimization version of flush that does
4601  *              not use stream off, rather flushes the system
4602  *
4603  * PARAMETERS :
4604  *
4605  *
4606  * RETURN     : 0 : success
4607  *              -EINVAL: input is malformed (device is not valid)
4608  *              -ENODEV: if the device has encountered a serious error
4609  *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    ATRACE_CALL();
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    // Snapshot how many buffers the HAL currently holds; the buffer-return
    // path decrements this count and signals mBuffersCond.
    mPendingBuffersMap.numPendingBufsAtFlush =
        mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
        mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    // Nothing outstanding: flush is already complete.
    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // Compute an absolute deadline FLUSH_TIMEOUT seconds from now on
    // CLOCK_MONOTONIC. If the clock read fails, fall back to an untimed wait.
    // NOTE(review): pthread_cond_timedwait interprets the deadline against
    // the condvar's clock, which is CLOCK_REALTIME unless mBuffersCond was
    // initialized with pthread_condattr_setclock(CLOCK_MONOTONIC) — confirm
    // the condvar's initialization elsewhere in this file.
    rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
    if (rc < 0) {
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    // Loop until all snapshotted buffers have been returned; both wait calls
    // return the error number directly (not -1/errno), hence rc != 0 checks.
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                 LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                 break;
            }
        } else {
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                            strerror(rc));
                break;
            }
        }
    }
    // A wait failure (including ETIMEDOUT) is reported as a device error.
    if (rc != 0) {
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
               LOGE("Flushing the channels failed with error %d", rc);
               // even though the channel flush failed we need to continue and
               // return the buffers we have to the framework, however the return
               // value will be an error
               rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
4710 
4711 /*===========================================================================
4712  * FUNCTION   : handleCameraDeviceError
4713  *
4714  * DESCRIPTION: This function calls internal flush and notifies the error to
4715  *              framework and updates the state variable.
4716  *
4717  * PARAMETERS : None
4718  *
4719  * RETURN     : NO_ERROR on Success
4720  *              Error code on failure
4721  *==========================================================================*/
handleCameraDeviceError()4722 int32_t QCamera3HardwareInterface::handleCameraDeviceError()
4723 {
4724     int32_t rc = NO_ERROR;
4725 
4726     pthread_mutex_lock(&mMutex);
4727     if (mState != ERROR) {
4728         //if mState != ERROR, nothing to be done
4729         pthread_mutex_unlock(&mMutex);
4730         return NO_ERROR;
4731     }
4732     pthread_mutex_unlock(&mMutex);
4733 
4734     rc = flush(false /* restart channels */);
4735     if (NO_ERROR != rc) {
4736         LOGE("internal flush to handle mState = ERROR failed");
4737     }
4738 
4739     pthread_mutex_lock(&mMutex);
4740     mState = DEINIT;
4741     pthread_mutex_unlock(&mMutex);
4742 
4743     camera3_notify_msg_t notify_msg;
4744     memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
4745     notify_msg.type = CAMERA3_MSG_ERROR;
4746     notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
4747     notify_msg.message.error.error_stream = NULL;
4748     notify_msg.message.error.frame_number = 0;
4749     mCallbackOps->notify(mCallbackOps, &notify_msg);
4750 
4751     return rc;
4752 }
4753 
4754 /*===========================================================================
4755  * FUNCTION   : captureResultCb
4756  *
4757  * DESCRIPTION: Callback handler for all capture result
4758  *              (streams, as well as metadata)
4759  *
4760  * PARAMETERS :
4761  *   @metadata : metadata information
4762  *   @buffer   : actual gralloc buffer to be returned to frameworks.
4763  *               NULL if metadata.
4764  *
4765  * RETURN     : NONE
4766  *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata_buf,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer)4767 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4768                 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4769 {
4770     if (metadata_buf) {
4771         pthread_mutex_lock(&mMutex);
4772         uint8_t batchSize = mBatchSize;
4773         pthread_mutex_unlock(&mMutex);
4774         if (batchSize) {
4775             handleBatchMetadata(metadata_buf,
4776                     true /* free_and_bufdone_meta_buf */);
4777         } else { /* mBatchSize = 0 */
4778             hdrPlusPerfLock(metadata_buf);
4779             pthread_mutex_lock(&mMutex);
4780             handleMetadataWithLock(metadata_buf,
4781                     true /* free_and_bufdone_meta_buf */,
4782                     false /* first frame of batch metadata */ );
4783             pthread_mutex_unlock(&mMutex);
4784         }
4785     } else if (isInputBuffer) {
4786         pthread_mutex_lock(&mMutex);
4787         handleInputBufferWithLock(frame_number);
4788         pthread_mutex_unlock(&mMutex);
4789     } else {
4790         pthread_mutex_lock(&mMutex);
4791         handleBufferWithLock(buffer, frame_number);
4792         pthread_mutex_unlock(&mMutex);
4793     }
4794     return;
4795 }
4796 
4797 /*===========================================================================
4798  * FUNCTION   : getReprocessibleOutputStreamId
4799  *
4800  * DESCRIPTION: Get source output stream id for the input reprocess stream
4801  *              based on size and format, which would be the largest
4802  *              output stream if an input stream exists.
4803  *
4804  * PARAMETERS :
4805  *   @id      : return the stream id if found
4806  *
4807  * RETURN     : int32_t type of status
4808  *              NO_ERROR  -- success
4809  *              none-zero failure code
4810  *==========================================================================*/
getReprocessibleOutputStreamId(uint32_t & id)4811 int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4812 {
4813     /* check if any output or bidirectional stream with the same size and format
4814        and return that stream */
4815     if ((mInputStreamInfo.dim.width > 0) &&
4816             (mInputStreamInfo.dim.height > 0)) {
4817         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4818                 it != mStreamInfo.end(); it++) {
4819 
4820             camera3_stream_t *stream = (*it)->stream;
4821             if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4822                     (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4823                     (stream->format == mInputStreamInfo.format)) {
4824                 // Usage flag for an input stream and the source output stream
4825                 // may be different.
4826                 LOGD("Found reprocessible output stream! %p", *it);
4827                 LOGD("input stream usage 0x%x, current stream usage 0x%x",
4828                          stream->usage, mInputStreamInfo.usage);
4829 
4830                 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4831                 if (channel != NULL && channel->mStreams[0]) {
4832                     id = channel->mStreams[0]->getMyServerID();
4833                     return NO_ERROR;
4834                 }
4835             }
4836         }
4837     } else {
4838         LOGD("No input stream, so no reprocessible output stream");
4839     }
4840     return NAME_NOT_FOUND;
4841 }
4842 
4843 /*===========================================================================
4844  * FUNCTION   : lookupFwkName
4845  *
4846  * DESCRIPTION: In case the enum is not same in fwk and backend
4847  *              make sure the parameter is correctly propogated
4848  *
4849  * PARAMETERS  :
4850  *   @arr      : map between the two enums
4851  *   @len      : len of the map
4852  *   @hal_name : name of the hal_parm to map
4853  *
4854  * RETURN     : int type of status
4855  *              fwk_name  -- success
4856  *              none-zero failure code
4857  *==========================================================================*/
lookupFwkName(const mapType * arr,size_t len,halType hal_name)4858 template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
4859         size_t len, halType hal_name)
4860 {
4861 
4862     for (size_t i = 0; i < len; i++) {
4863         if (arr[i].hal_name == hal_name) {
4864             return arr[i].fwk_name;
4865         }
4866     }
4867 
4868     /* Not able to find matching framework type is not necessarily
4869      * an error case. This happens when mm-camera supports more attributes
4870      * than the frameworks do */
4871     LOGH("Cannot find matching framework type");
4872     return NAME_NOT_FOUND;
4873 }
4874 
4875 /*===========================================================================
4876  * FUNCTION   : lookupHalName
4877  *
4878  * DESCRIPTION: In case the enum is not same in fwk and backend
4879  *              make sure the parameter is correctly propogated
4880  *
4881  * PARAMETERS  :
4882  *   @arr      : map between the two enums
4883  *   @len      : len of the map
4884  *   @fwk_name : name of the hal_parm to map
4885  *
4886  * RETURN     : int32_t type of status
4887  *              hal_name  -- success
4888  *              none-zero failure code
4889  *==========================================================================*/
lookupHalName(const mapType * arr,size_t len,fwkType fwk_name)4890 template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
4891         size_t len, fwkType fwk_name)
4892 {
4893     for (size_t i = 0; i < len; i++) {
4894         if (arr[i].fwk_name == fwk_name) {
4895             return arr[i].hal_name;
4896         }
4897     }
4898 
4899     LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
4900     return NAME_NOT_FOUND;
4901 }
4902 
4903 /*===========================================================================
4904  * FUNCTION   : lookupProp
4905  *
4906  * DESCRIPTION: lookup a value by its name
4907  *
4908  * PARAMETERS :
4909  *   @arr     : map between the two enums
4910  *   @len     : size of the map
4911  *   @name    : name to be looked up
4912  *
4913  * RETURN     : Value if found
4914  *              CAM_CDS_MODE_MAX if not found
4915  *==========================================================================*/
lookupProp(const mapType * arr,size_t len,const char * name)4916 template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
4917         size_t len, const char *name)
4918 {
4919     if (name) {
4920         for (size_t i = 0; i < len; i++) {
4921             if (!strcmp(arr[i].desc, name)) {
4922                 return arr[i].val;
4923             }
4924         }
4925     }
4926     return CAM_CDS_MODE_MAX;
4927 }
4928 
4929 /*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: translate the metadata coming back from the HAL into the
 *              framework (camera_metadata_t) format
4932  *
4933  * PARAMETERS :
4934  *   @metadata : metadata information from callback
4935  *   @timestamp: metadata buffer timestamp
4936  *   @request_id: request id
4937  *   @jpegMetadata: additional jpeg metadata
4938  *   @hybrid_ae_enable: whether hybrid ae is enabled
4939  *   // DevCamDebug metadata
4940  *   @DevCamDebug_meta_enable: enable DevCamDebug meta
4941  *   // DevCamDebug metadata end
 *   @pprocDone: whether internal offline postprocessing is done
4943  *
4944  * RETURN     : camera_metadata_t*
4945  *              metadata in a format specified by fwk
4946  *==========================================================================*/
4947 camera_metadata_t*
translateFromHalMetadata(metadata_buffer_t * metadata,nsecs_t timestamp,int32_t request_id,const CameraMetadata & jpegMetadata,uint8_t pipeline_depth,uint8_t capture_intent,uint8_t hybrid_ae_enable,uint8_t DevCamDebug_meta_enable,bool pprocDone,uint8_t fwk_cacMode,bool firstMetadataInBatch)4948 QCamera3HardwareInterface::translateFromHalMetadata(
4949                                  metadata_buffer_t *metadata,
4950                                  nsecs_t timestamp,
4951                                  int32_t request_id,
4952                                  const CameraMetadata& jpegMetadata,
4953                                  uint8_t pipeline_depth,
4954                                  uint8_t capture_intent,
4955                                  uint8_t hybrid_ae_enable,
4956                                  /* DevCamDebug metadata translateFromHalMetadata argument */
4957                                  uint8_t DevCamDebug_meta_enable,
4958                                  /* DevCamDebug metadata end */
4959                                  bool pprocDone,
4960                                  uint8_t fwk_cacMode,
4961                                  bool firstMetadataInBatch)
4962 {
4963     CameraMetadata camMetadata;
4964     camera_metadata_t *resultMetadata;
4965 
4966     if (mBatchSize && !firstMetadataInBatch) {
4967         /* In batch mode, use cached metadata from the first metadata
4968             in the batch */
4969         camMetadata.clear();
4970         camMetadata = mCachedMetadata;
4971     }
4972 
4973     if (jpegMetadata.entryCount())
4974         camMetadata.append(jpegMetadata);
4975 
4976     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
4977     camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4978     camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4979     camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4980     camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
4981     if (mBatchSize == 0) {
4982         // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
4983         camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
4984     }
4985 
4986     if (mBatchSize && !firstMetadataInBatch) {
4987         /* In batch mode, use cached metadata instead of parsing metadata buffer again */
4988         resultMetadata = camMetadata.release();
4989         return resultMetadata;
4990     }
4991 
4992     // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
4993     // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
4994     if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
4995         // DevCamDebug metadata translateFromHalMetadata AF
4996         IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
4997                 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
4998             int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
4999             camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
5000         }
5001         IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
5002                 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
5003             int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
5004             camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
5005         }
5006         IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
5007                 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
5008             int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
5009             camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
5010         }
5011         IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
5012                 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
5013             int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
5014             camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
5015         }
5016         IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
5017                 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
5018             int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
5019             camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
5020         }
5021         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
5022                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
5023             int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
5024                 *DevCamDebug_af_monitor_pdaf_target_pos;
5025             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
5026                 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
5027         }
5028         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
5029                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
5030             int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
5031                 *DevCamDebug_af_monitor_pdaf_confidence;
5032             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
5033                 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
5034         }
5035         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
5036                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
5037             int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
5038             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
5039                 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
5040         }
5041         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
5042                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
5043             int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
5044                 *DevCamDebug_af_monitor_tof_target_pos;
5045             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
5046                 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
5047         }
5048         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
5049                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
5050             int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
5051                 *DevCamDebug_af_monitor_tof_confidence;
5052             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
5053                 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
5054         }
5055         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
5056                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
5057             int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
5058             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
5059                 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
5060         }
5061         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
5062                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
5063             int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
5064             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
5065                 &fwk_DevCamDebug_af_monitor_type_select, 1);
5066         }
5067         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
5068                 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
5069             int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
5070             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
5071                 &fwk_DevCamDebug_af_monitor_refocus, 1);
5072         }
5073         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
5074                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
5075             int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
5076             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
5077                 &fwk_DevCamDebug_af_monitor_target_pos, 1);
5078         }
5079         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
5080                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
5081             int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
5082                 *DevCamDebug_af_search_pdaf_target_pos;
5083             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
5084                 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
5085         }
5086         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
5087                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
5088             int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
5089             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
5090                 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
5091         }
5092         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
5093                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
5094             int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
5095             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
5096                 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
5097         }
5098         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
5099                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
5100             int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
5101             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
5102                 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
5103         }
5104         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
5105                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
5106             int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
5107             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
5108                 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
5109         }
5110         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
5111                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
5112             int32_t fwk_DevCamDebug_af_search_tof_target_pos =
5113                 *DevCamDebug_af_search_tof_target_pos;
5114             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
5115                 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
5116         }
5117         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
5118                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
5119             int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
5120             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
5121                 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
5122         }
5123         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
5124                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
5125             int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
5126             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
5127                 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
5128         }
5129         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
5130                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
5131             int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
5132             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
5133                 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
5134         }
5135         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
5136                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
5137             int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
5138             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
5139                 &fwk_DevCamDebug_af_search_tof_confidence, 1);
5140         }
5141         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
5142                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
5143             int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
5144             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
5145                 &fwk_DevCamDebug_af_search_type_select, 1);
5146         }
5147         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
5148                 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
5149             int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
5150             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
5151                 &fwk_DevCamDebug_af_search_next_pos, 1);
5152         }
5153         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
5154                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
5155             int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
5156             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
5157                 &fwk_DevCamDebug_af_search_target_pos, 1);
5158         }
5159         // DevCamDebug metadata translateFromHalMetadata AEC
5160         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
5161                 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
5162             int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
5163             camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
5164     }
5165         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
5166                 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
5167             int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
5168             camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
5169         }
5170         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
5171                 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
5172             int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
5173             camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
5174         }
5175         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
5176                 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
5177             int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
5178             camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
5179         }
5180         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
5181                 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
5182             int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
5183             camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
5184         }
5185         IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
5186                 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
5187             float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
5188             camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
5189         }
5190         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
5191                 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
5192             int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
5193             camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
5194         }
5195         IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
5196                 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
5197             float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
5198             camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
5199         }
5200         // DevCamDebug metadata translateFromHalMetadata AWB
5201         IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
5202                 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
5203             float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
5204             camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
5205         }
5206         IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
5207                 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
5208             float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
5209             camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
5210         }
5211         IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
5212                 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
5213             float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
5214             camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
5215         }
5216         IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
5217                 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
5218             int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
5219             camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
5220         }
5221         IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
5222                 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
5223             int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
5224             camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
5225         }
5226     }
5227     // atrace_end(ATRACE_TAG_ALWAYS);
5228 
5229     IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
5230         int64_t fwk_frame_number = *frame_number;
5231         camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
5232     }
5233 
5234     IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
5235         int32_t fps_range[2];
5236         fps_range[0] = (int32_t)float_range->min_fps;
5237         fps_range[1] = (int32_t)float_range->max_fps;
5238         camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
5239                                       fps_range, 2);
5240         LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
5241              fps_range[0], fps_range[1]);
5242     }
5243 
5244     IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
5245         camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
5246     }
5247 
5248     IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5249         int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
5250                 METADATA_MAP_SIZE(SCENE_MODES_MAP),
5251                 *sceneMode);
5252         if (NAME_NOT_FOUND != val) {
5253             uint8_t fwkSceneMode = (uint8_t)val;
5254             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
5255             LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
5256                      fwkSceneMode);
5257         }
5258     }
5259 
5260     IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
5261         uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
5262         camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
5263     }
5264 
5265     IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
5266         uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
5267         camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
5268     }
5269 
5270     IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
5271         uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
5272         camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
5273     }
5274 
5275     IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
5276             CAM_INTF_META_EDGE_MODE, metadata) {
5277         camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
5278     }
5279 
5280     IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
5281         uint8_t fwk_flashPower = (uint8_t) *flashPower;
5282         camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
5283     }
5284 
5285     IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
5286         camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
5287     }
5288 
5289     IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
5290         if (0 <= *flashState) {
5291             uint8_t fwk_flashState = (uint8_t) *flashState;
5292             if (!gCamCapability[mCameraId]->flash_available) {
5293                 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
5294             }
5295             camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
5296         }
5297     }
5298 
5299     IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
5300         int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
5301         if (NAME_NOT_FOUND != val) {
5302             uint8_t fwk_flashMode = (uint8_t)val;
5303             camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
5304         }
5305     }
5306 
5307     IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
5308         uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
5309         camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
5310     }
5311 
5312     IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
5313         camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
5314     }
5315 
5316     IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
5317         camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
5318     }
5319 
5320     IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
5321         camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
5322     }
5323 
5324     IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
5325         uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
5326         camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
5327     }
5328 
5329     IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
5330         uint8_t fwk_videoStab = (uint8_t) *videoStab;
5331         LOGD("fwk_videoStab = %d", fwk_videoStab);
5332         camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
5333     } else {
5334         // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
5335         // and so hardcoding the Video Stab result to OFF mode.
5336         uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
5337         camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
5338         LOGD("%s: EIS result default to OFF mode", __func__);
5339     }
5340 
5341     IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
5342         uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
5343         camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
5344     }
5345 
5346     IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
5347         camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
5348     }
5349 
5350     IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
5351         CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
5352         float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
5353 
5354         adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
5355               gCamCapability[mCameraId]->color_arrangement);
5356 
5357         LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
5358           blackLevelAppliedPattern->cam_black_level[0],
5359           blackLevelAppliedPattern->cam_black_level[1],
5360           blackLevelAppliedPattern->cam_black_level[2],
5361           blackLevelAppliedPattern->cam_black_level[3]);
5362         camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
5363                 BLACK_LEVEL_PATTERN_CNT);
5364 
5365         // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
5366         // Need convert the internal 12 bit depth to sensor 10 bit sensor raw
5367         // depth space.
5368         fwk_blackLevelInd[0] /= 4.0;
5369         fwk_blackLevelInd[1] /= 4.0;
5370         fwk_blackLevelInd[2] /= 4.0;
5371         fwk_blackLevelInd[3] /= 4.0;
5372         camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
5373                 BLACK_LEVEL_PATTERN_CNT);
5374     }
5375 
5376     // Fixed whitelevel is used by ISP/Sensor
5377     camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
5378             &gCamCapability[mCameraId]->white_level, 1);
5379 
5380     IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
5381             CAM_INTF_META_SCALER_CROP_REGION, metadata) {
5382         int32_t scalerCropRegion[4];
5383         scalerCropRegion[0] = hScalerCropRegion->left;
5384         scalerCropRegion[1] = hScalerCropRegion->top;
5385         scalerCropRegion[2] = hScalerCropRegion->width;
5386         scalerCropRegion[3] = hScalerCropRegion->height;
5387 
5388         // Adjust crop region from sensor output coordinate system to active
5389         // array coordinate system.
5390         mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
5391                 scalerCropRegion[2], scalerCropRegion[3]);
5392 
5393         camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
5394     }
5395 
5396     IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
5397         LOGD("sensorExpTime = %lld", *sensorExpTime);
5398         camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
5399     }
5400 
5401     IF_META_AVAILABLE(int64_t, sensorFameDuration,
5402             CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
5403         LOGD("sensorFameDuration = %lld", *sensorFameDuration);
5404         camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
5405     }
5406 
5407     IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
5408             CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
5409         LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
5410         camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
5411                 sensorRollingShutterSkew, 1);
5412     }
5413 
5414     IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
5415         LOGD("sensorSensitivity = %d", *sensorSensitivity);
5416         camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
5417 
5418         //calculate the noise profile based on sensitivity
5419         double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
5420         double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
5421         double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
5422         for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
5423             noise_profile[i]   = noise_profile_S;
5424             noise_profile[i+1] = noise_profile_O;
5425         }
5426         LOGD("noise model entry (S, O) is (%f, %f)",
5427                 noise_profile_S, noise_profile_O);
5428         camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
5429                 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
5430     }
5431 
5432     int32_t fwk_ispSensitivity = 100;
5433     IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
5434         fwk_ispSensitivity = (int32_t) *ispSensitivity;
5435     }
5436     IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
5437         fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
5438     }
5439     camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
5440 
5441     IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
5442         uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
5443         camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
5444     }
5445 
5446     IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
5447         int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
5448                 *faceDetectMode);
5449         if (NAME_NOT_FOUND != val) {
5450             uint8_t fwk_faceDetectMode = (uint8_t)val;
5451             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
5452 
5453             if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
5454                 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
5455                         CAM_INTF_META_FACE_DETECTION, metadata) {
5456                     uint8_t numFaces = MIN(
5457                             faceDetectionInfo->num_faces_detected, MAX_ROI);
5458                     int32_t faceIds[MAX_ROI];
5459                     uint8_t faceScores[MAX_ROI];
5460                     int32_t faceRectangles[MAX_ROI * 4];
5461                     int32_t faceLandmarks[MAX_ROI * 6];
5462                     size_t j = 0, k = 0;
5463 
5464                     for (size_t i = 0; i < numFaces; i++) {
5465                         faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
5466                         // Adjust crop region from sensor output coordinate system to active
5467                         // array coordinate system.
5468                         cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
5469                         mCropRegionMapper.toActiveArray(rect.left, rect.top,
5470                                 rect.width, rect.height);
5471 
5472                         convertToRegions(faceDetectionInfo->faces[i].face_boundary,
5473                                 faceRectangles+j, -1);
5474 
5475                         j+= 4;
5476                     }
5477                     if (numFaces <= 0) {
5478                         memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
5479                         memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
5480                         memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
5481                         memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
5482                     }
5483 
5484                     camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
5485                             numFaces);
5486                     camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
5487                             faceRectangles, numFaces * 4U);
5488                     if (fwk_faceDetectMode ==
5489                             ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
5490                         IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
5491                                 CAM_INTF_META_FACE_LANDMARK, metadata) {
5492 
5493                             for (size_t i = 0; i < numFaces; i++) {
5494                                 // Map the co-ordinate sensor output coordinate system to active
5495                                 // array coordinate system.
5496                                 mCropRegionMapper.toActiveArray(
5497                                         landmarks->face_landmarks[i].left_eye_center.x,
5498                                         landmarks->face_landmarks[i].left_eye_center.y);
5499                                 mCropRegionMapper.toActiveArray(
5500                                         landmarks->face_landmarks[i].right_eye_center.x,
5501                                         landmarks->face_landmarks[i].right_eye_center.y);
5502                                 mCropRegionMapper.toActiveArray(
5503                                         landmarks->face_landmarks[i].mouth_center.x,
5504                                         landmarks->face_landmarks[i].mouth_center.y);
5505 
5506                                 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
5507                                 k+= 6;
5508                             }
5509                         }
5510 
5511                         camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
5512                         camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
5513                                 faceLandmarks, numFaces * 6U);
5514                    }
5515                 }
5516             }
5517         }
5518     }
5519 
5520     IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
5521         uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
5522         camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
5523     }
5524 
5525     IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
5526             CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
5527         uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
5528         camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
5529     }
5530 
5531     IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
5532             CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
5533         camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
5534                 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
5535     }
5536 
5537     IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
5538             CAM_INTF_META_LENS_SHADING_MAP, metadata) {
5539         size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
5540                 CAM_MAX_SHADING_MAP_HEIGHT);
5541         size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
5542                 CAM_MAX_SHADING_MAP_WIDTH);
5543         camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
5544                 lensShadingMap->lens_shading, 4U * map_width * map_height);
5545     }
5546 
5547     IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
5548         uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
5549         camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
5550     }
5551 
5552     IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
5553         //Populate CAM_INTF_META_TONEMAP_CURVES
5554         /* ch0 = G, ch 1 = B, ch 2 = R*/
5555         if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5556             LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5557                      tonemap->tonemap_points_cnt,
5558                     CAM_MAX_TONEMAP_CURVE_SIZE);
5559             tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5560         }
5561 
5562         camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
5563                         &tonemap->curves[0].tonemap_points[0][0],
5564                         tonemap->tonemap_points_cnt * 2);
5565 
5566         camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
5567                         &tonemap->curves[1].tonemap_points[0][0],
5568                         tonemap->tonemap_points_cnt * 2);
5569 
5570         camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
5571                         &tonemap->curves[2].tonemap_points[0][0],
5572                         tonemap->tonemap_points_cnt * 2);
5573     }
5574 
5575     IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
5576             CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
5577         camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
5578                 CC_GAINS_COUNT);
5579     }
5580 
5581     IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
5582             CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
5583         camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
5584                 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
5585                 CC_MATRIX_COLS * CC_MATRIX_ROWS);
5586     }
5587 
5588     IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
5589             CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
5590         if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5591             LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5592                      toneCurve->tonemap_points_cnt,
5593                     CAM_MAX_TONEMAP_CURVE_SIZE);
5594             toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5595         }
5596         camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
5597                 (float*)toneCurve->curve.tonemap_points,
5598                 toneCurve->tonemap_points_cnt * 2);
5599     }
5600 
5601     IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
5602             CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
5603         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
5604                 predColorCorrectionGains->gains, 4);
5605     }
5606 
5607     IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
5608             CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
5609         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
5610                 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
5611                 CC_MATRIX_ROWS * CC_MATRIX_COLS);
5612     }
5613 
5614     IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
5615         camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
5616     }
5617 
5618     IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
5619         uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
5620         camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
5621     }
5622 
5623     IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
5624         uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
5625         camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
5626     }
5627 
5628     IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
5629         int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
5630                 *effectMode);
5631         if (NAME_NOT_FOUND != val) {
5632             uint8_t fwk_effectMode = (uint8_t)val;
5633             camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
5634         }
5635     }
5636 
5637     IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
5638             CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
5639         int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
5640                 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
5641         if (NAME_NOT_FOUND != fwk_testPatternMode) {
5642             camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
5643         }
5644         int32_t fwk_testPatternData[4];
5645         fwk_testPatternData[0] = testPatternData->r;
5646         fwk_testPatternData[3] = testPatternData->b;
5647         switch (gCamCapability[mCameraId]->color_arrangement) {
5648         case CAM_FILTER_ARRANGEMENT_RGGB:
5649         case CAM_FILTER_ARRANGEMENT_GRBG:
5650             fwk_testPatternData[1] = testPatternData->gr;
5651             fwk_testPatternData[2] = testPatternData->gb;
5652             break;
5653         case CAM_FILTER_ARRANGEMENT_GBRG:
5654         case CAM_FILTER_ARRANGEMENT_BGGR:
5655             fwk_testPatternData[2] = testPatternData->gr;
5656             fwk_testPatternData[1] = testPatternData->gb;
5657             break;
5658         default:
5659             LOGE("color arrangement %d is not supported",
5660                 gCamCapability[mCameraId]->color_arrangement);
5661             break;
5662         }
5663         camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
5664     }
5665 
5666     IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
5667         camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
5668     }
5669 
5670     IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
5671         String8 str((const char *)gps_methods);
5672         camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
5673     }
5674 
5675     IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
5676         camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
5677     }
5678 
5679     IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
5680         camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
5681     }
5682 
5683     IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
5684         uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
5685         camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
5686     }
5687 
5688     IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
5689         uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
5690         camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
5691     }
5692 
5693     IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
5694         int32_t fwk_thumb_size[2];
5695         fwk_thumb_size[0] = thumb_size->width;
5696         fwk_thumb_size[1] = thumb_size->height;
5697         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
5698     }
5699 
5700     // Skip reprocess metadata for high speed mode.
5701     if (mBatchSize == 0) {
5702         IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
5703             camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
5704                      privateData,
5705                      MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
5706         }
5707     }
5708 
5709     if (metadata->is_tuning_params_valid) {
5710         uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
5711         uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
5712         metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
5713 
5714 
5715         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
5716                 sizeof(uint32_t));
5717         data += sizeof(uint32_t);
5718 
5719         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
5720                 sizeof(uint32_t));
5721         LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5722         data += sizeof(uint32_t);
5723 
5724         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
5725                 sizeof(uint32_t));
5726         LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5727         data += sizeof(uint32_t);
5728 
5729         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
5730                 sizeof(uint32_t));
5731         LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5732         data += sizeof(uint32_t);
5733 
5734         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
5735                 sizeof(uint32_t));
5736         LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5737         data += sizeof(uint32_t);
5738 
5739         metadata->tuning_params.tuning_mod3_data_size = 0;
5740         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
5741                 sizeof(uint32_t));
5742         LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5743         data += sizeof(uint32_t);
5744 
5745         size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
5746                 TUNING_SENSOR_DATA_MAX);
5747         memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
5748                 count);
5749         data += count;
5750 
5751         count = MIN(metadata->tuning_params.tuning_vfe_data_size,
5752                 TUNING_VFE_DATA_MAX);
5753         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
5754                 count);
5755         data += count;
5756 
5757         count = MIN(metadata->tuning_params.tuning_cpp_data_size,
5758                 TUNING_CPP_DATA_MAX);
5759         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
5760                 count);
5761         data += count;
5762 
5763         count = MIN(metadata->tuning_params.tuning_cac_data_size,
5764                 TUNING_CAC_DATA_MAX);
5765         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
5766                 count);
5767         data += count;
5768 
5769         camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
5770                 (int32_t *)(void *)tuning_meta_data_blob,
5771                 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
5772     }
5773 
5774     IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
5775             CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
5776         camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
5777                 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
5778                 NEUTRAL_COL_POINTS);
5779     }
5780 
5781     IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
5782         uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
5783         camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
5784     }
5785 
5786     IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
5787         int32_t aeRegions[REGIONS_TUPLE_COUNT];
5788         // Adjust crop region from sensor output coordinate system to active
5789         // array coordinate system.
5790         mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
5791                 hAeRegions->rect.width, hAeRegions->rect.height);
5792 
5793         convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
5794         camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
5795                 REGIONS_TUPLE_COUNT);
5796         LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5797                  aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
5798                 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
5799                 hAeRegions->rect.height);
5800     }
5801 
5802     IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
5803         int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
5804         if (NAME_NOT_FOUND != val) {
5805             uint8_t fwkAfMode = (uint8_t)val;
5806             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
5807             LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
5808         } else {
5809             LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
5810                     val);
5811         }
5812     }
5813 
5814     IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
5815         uint8_t fwk_afState = (uint8_t) *afState;
5816         camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
5817         LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
5818     }
5819 
5820     IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
5821         camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
5822     }
5823 
5824     IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
5825         camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
5826     }
5827 
5828     IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
5829         uint8_t fwk_lensState = *lensState;
5830         camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
5831     }
5832 
5833     IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
5834         /*af regions*/
5835         int32_t afRegions[REGIONS_TUPLE_COUNT];
5836         // Adjust crop region from sensor output coordinate system to active
5837         // array coordinate system.
5838         mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
5839                 hAfRegions->rect.width, hAfRegions->rect.height);
5840 
5841         convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
5842         camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
5843                 REGIONS_TUPLE_COUNT);
5844         LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5845                  afRegions[0], afRegions[1], afRegions[2], afRegions[3],
5846                 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
5847                 hAfRegions->rect.height);
5848     }
5849 
5850     IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
5851         uint32_t ab_mode = *hal_ab_mode;
5852         if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
5853                 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
5854               ab_mode = CAM_ANTIBANDING_MODE_AUTO;
5855         }
5856         int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
5857                 ab_mode);
5858         if (NAME_NOT_FOUND != val) {
5859             uint8_t fwk_ab_mode = (uint8_t)val;
5860             camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
5861         }
5862     }
5863 
5864     IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5865         int val = lookupFwkName(SCENE_MODES_MAP,
5866                 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
5867         if (NAME_NOT_FOUND != val) {
5868             uint8_t fwkBestshotMode = (uint8_t)val;
5869             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
5870             LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
5871         } else {
5872             LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
5873         }
5874     }
5875 
5876     IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
5877          uint8_t fwk_mode = (uint8_t) *mode;
5878          camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
5879     }
5880 
5881     /* Constant metadata values to be update*/
5882     uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
5883     camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
5884 
5885     uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5886     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5887 
5888     int32_t hotPixelMap[2];
5889     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
5890 
5891     // CDS
5892     IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
5893         camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
5894     }
5895 
5896     // TNR
5897     IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
5898         uint8_t tnr_enable       = tnr->denoise_enable;
5899         int32_t tnr_process_type = (int32_t)tnr->process_plates;
5900 
5901         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
5902         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
5903     }
5904 
5905     // Reprocess crop data
5906     IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
5907         uint8_t cnt = crop_data->num_of_streams;
5908         if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
5909             // mm-qcamera-daemon only posts crop_data for streams
5910             // not linked to pproc. So no valid crop metadata is not
5911             // necessarily an error case.
5912             LOGD("No valid crop metadata entries");
5913         } else {
5914             uint32_t reproc_stream_id;
5915             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5916                 LOGD("No reprocessible stream found, ignore crop data");
5917             } else {
5918                 int rc = NO_ERROR;
5919                 Vector<int32_t> roi_map;
5920                 int32_t *crop = new int32_t[cnt*4];
5921                 if (NULL == crop) {
5922                    rc = NO_MEMORY;
5923                 }
5924                 if (NO_ERROR == rc) {
5925                     int32_t streams_found = 0;
5926                     for (size_t i = 0; i < cnt; i++) {
5927                         if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
5928                             if (pprocDone) {
5929                                 // HAL already does internal reprocessing,
5930                                 // either via reprocessing before JPEG encoding,
5931                                 // or offline postprocessing for pproc bypass case.
5932                                 crop[0] = 0;
5933                                 crop[1] = 0;
5934                                 crop[2] = mInputStreamInfo.dim.width;
5935                                 crop[3] = mInputStreamInfo.dim.height;
5936                             } else {
5937                                 crop[0] = crop_data->crop_info[i].crop.left;
5938                                 crop[1] = crop_data->crop_info[i].crop.top;
5939                                 crop[2] = crop_data->crop_info[i].crop.width;
5940                                 crop[3] = crop_data->crop_info[i].crop.height;
5941                             }
5942                             roi_map.add(crop_data->crop_info[i].roi_map.left);
5943                             roi_map.add(crop_data->crop_info[i].roi_map.top);
5944                             roi_map.add(crop_data->crop_info[i].roi_map.width);
5945                             roi_map.add(crop_data->crop_info[i].roi_map.height);
5946                             streams_found++;
5947                             LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
5948                                     crop[0], crop[1], crop[2], crop[3]);
5949                             LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
5950                                     crop_data->crop_info[i].roi_map.left,
5951                                     crop_data->crop_info[i].roi_map.top,
5952                                     crop_data->crop_info[i].roi_map.width,
5953                                     crop_data->crop_info[i].roi_map.height);
5954                             break;
5955 
5956                        }
5957                     }
5958                     camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
5959                             &streams_found, 1);
5960                     camMetadata.update(QCAMERA3_CROP_REPROCESS,
5961                             crop, (size_t)(streams_found * 4));
5962                     if (roi_map.array()) {
5963                         camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
5964                                 roi_map.array(), roi_map.size());
5965                     }
5966                }
5967                if (crop) {
5968                    delete [] crop;
5969                }
5970             }
5971         }
5972     }
5973 
5974     if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
5975         // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
5976         // so hardcoding the CAC result to OFF mode.
5977         uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
5978         camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
5979     } else {
5980         IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
5981             int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
5982                     *cacMode);
5983             if (NAME_NOT_FOUND != val) {
5984                 uint8_t resultCacMode = (uint8_t)val;
5985                 // check whether CAC result from CB is equal to Framework set CAC mode
5986                 // If not equal then set the CAC mode came in corresponding request
5987                 if (fwk_cacMode != resultCacMode) {
5988                     resultCacMode = fwk_cacMode;
5989                 }
5990                 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
5991                 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
5992             } else {
5993                 LOGE("Invalid CAC camera parameter: %d", *cacMode);
5994             }
5995         }
5996     }
5997 
5998     // Post blob of cam_cds_data through vendor tag.
5999     IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
6000         uint8_t cnt = cdsInfo->num_of_streams;
6001         cam_cds_data_t cdsDataOverride;
6002         memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
6003         cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
6004         cdsDataOverride.num_of_streams = 1;
6005         if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
6006             uint32_t reproc_stream_id;
6007             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6008                 LOGD("No reprocessible stream found, ignore cds data");
6009             } else {
6010                 for (size_t i = 0; i < cnt; i++) {
6011                     if (cdsInfo->cds_info[i].stream_id ==
6012                             reproc_stream_id) {
6013                         cdsDataOverride.cds_info[0].cds_enable =
6014                                 cdsInfo->cds_info[i].cds_enable;
6015                         break;
6016                     }
6017                 }
6018             }
6019         } else {
6020             LOGD("Invalid stream count %d in CDS_DATA", cnt);
6021         }
6022         camMetadata.update(QCAMERA3_CDS_INFO,
6023                 (uint8_t *)&cdsDataOverride,
6024                 sizeof(cam_cds_data_t));
6025     }
6026 
6027     // Ldaf calibration data
6028     if (!mLdafCalibExist) {
6029         IF_META_AVAILABLE(uint32_t, ldafCalib,
6030                 CAM_INTF_META_LDAF_EXIF, metadata) {
6031             mLdafCalibExist = true;
6032             mLdafCalib[0] = ldafCalib[0];
6033             mLdafCalib[1] = ldafCalib[1];
6034             LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
6035                     ldafCalib[0], ldafCalib[1]);
6036         }
6037     }
6038 
6039     // AF scene change
6040     IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
6041         camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
6042     }
6043 
6044     /* In batch mode, cache the first metadata in the batch */
6045     if (mBatchSize && firstMetadataInBatch) {
6046         mCachedMetadata.clear();
6047         mCachedMetadata = camMetadata;
6048     }
6049 
6050     resultMetadata = camMetadata.release();
6051     return resultMetadata;
6052 }
6053 
/*===========================================================================
 * FUNCTION   : saveExifParams
 *
 * DESCRIPTION: Cache the 3A/stats EXIF debug parameters (AE/AWB/AF/ASD,
 *              stats, BE stats, bayer histogram, 3A tuning) carried in the
 *              metadata callback into mExifParams, for later use when
 *              composing the EXIF section of a JPEG.
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : none
 *
 *==========================================================================*/
void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
{
    // Cache the 3A/stats EXIF debug blobs carried in this metadata callback
    // into mExifParams, so they can be embedded in the EXIF of the next JPEG.
    // Each block copies only when the vendor debug buffer (debug_params) has
    // been allocated; otherwise the entry is silently ignored.

    // AE (auto-exposure) debug blob.
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
            mExifParams.debug_params->ae_debug_params_valid = TRUE;
        }
    }
    // AWB (auto-white-balance) debug blob.
    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
            mExifParams.debug_params->awb_debug_params_valid = TRUE;
        }
    }
    // AF (auto-focus) debug blob.
    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
            mExifParams.debug_params->af_debug_params_valid = TRUE;
        }
    }
    // ASD (auto-scene-detection) debug blob.
    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
            mExifParams.debug_params->asd_debug_params_valid = TRUE;
        }
    }
    // General stats buffer debug blob.
    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
            mExifParams.debug_params->stats_debug_params_valid = TRUE;
        }
    }
    // BE (Bayer exposure) stats debug blob.
    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
            mExifParams.debug_params->bestats_debug_params_valid = TRUE;
        }
    }
    // Bayer histogram debug blob.
    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
            mExifParams.debug_params->bhist_debug_params_valid = TRUE;
        }
    }
    // 3A tuning info debug blob.
    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
            mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
        }
    }
}
6124 
6125 /*===========================================================================
6126  * FUNCTION   : get3AExifParams
6127  *
6128  * DESCRIPTION:
6129  *
6130  * PARAMETERS : none
6131  *
6132  *
6133  * RETURN     : mm_jpeg_exif_params_t
6134  *
6135  *==========================================================================*/
get3AExifParams()6136 mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
6137 {
6138     return mExifParams;
6139 }
6140 
6141 /*===========================================================================
6142  * FUNCTION   : translateCbUrgentMetadataToResultMetadata
6143  *
6144  * DESCRIPTION:
6145  *
6146  * PARAMETERS :
6147  *   @metadata : metadata information from callback
6148  *
6149  * RETURN     : camera_metadata_t*
6150  *              metadata in a format specified by fwk
6151  *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    // Translate the "urgent" (partial-result) subset of the HAL metadata --
    // 3A states, triggers and modes -- into framework CameraMetadata so it
    // can be delivered ahead of the full capture result.
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state (converged/searching/...) passed through as a uint8.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger and its id, echoed back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state (converged/flash-required/...).
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF trigger and its id, echoed back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // HAL white-balance mode -> framework AWB mode via lookup table.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // Deduce ANDROID_CONTROL_AE_MODE from three independent HAL settings:
    // red-eye reduction, LED/flash mode and AE mode. Priority order below:
    // red-eye > auto/on flash > plain AE on/off.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the three settings were present/recognized; emit nothing.
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }

    // Transfer ownership of the packed metadata buffer to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
6246 
6247 /*===========================================================================
6248  * FUNCTION   : dumpMetadataToFile
6249  *
6250  * DESCRIPTION: Dumps tuning metadata to file system
6251  *
6252  * PARAMETERS :
6253  *   @meta           : tuning metadata
6254  *   @dumpFrameCount : current dump frame count
6255  *   @enabled        : Enable mask
6256  *
6257  *==========================================================================*/
dumpMetadataToFile(tuning_params_t & meta,uint32_t & dumpFrameCount,bool enabled,const char * type,uint32_t frameNumber)6258 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
6259                                                    uint32_t &dumpFrameCount,
6260                                                    bool enabled,
6261                                                    const char *type,
6262                                                    uint32_t frameNumber)
6263 {
6264     //Some sanity checks
6265     if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
6266         LOGE("Tuning sensor data size bigger than expected %d: %d",
6267               meta.tuning_sensor_data_size,
6268               TUNING_SENSOR_DATA_MAX);
6269         return;
6270     }
6271 
6272     if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
6273         LOGE("Tuning VFE data size bigger than expected %d: %d",
6274               meta.tuning_vfe_data_size,
6275               TUNING_VFE_DATA_MAX);
6276         return;
6277     }
6278 
6279     if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
6280         LOGE("Tuning CPP data size bigger than expected %d: %d",
6281               meta.tuning_cpp_data_size,
6282               TUNING_CPP_DATA_MAX);
6283         return;
6284     }
6285 
6286     if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
6287         LOGE("Tuning CAC data size bigger than expected %d: %d",
6288               meta.tuning_cac_data_size,
6289               TUNING_CAC_DATA_MAX);
6290         return;
6291     }
6292     //
6293 
6294     if(enabled){
6295         char timeBuf[FILENAME_MAX];
6296         char buf[FILENAME_MAX];
6297         memset(buf, 0, sizeof(buf));
6298         memset(timeBuf, 0, sizeof(timeBuf));
6299         time_t current_time;
6300         struct tm * timeinfo;
6301         time (&current_time);
6302         timeinfo = localtime (&current_time);
6303         if (timeinfo != NULL) {
6304             /* Consistent naming for Jpeg+meta+raw: meta name */
6305             strftime (timeBuf, sizeof(timeBuf),
6306                     QCAMERA_DUMP_FRM_LOCATION"IMG_%Y%m%d_%H%M%S", timeinfo);
6307             /* Consistent naming for Jpeg+meta+raw: meta name end*/
6308         }
6309         String8 filePath(timeBuf);
6310          /* Consistent naming for Jpeg+meta+raw */
6311         snprintf(buf,
6312                 sizeof(buf),
6313                 "%dm_%s_%d.bin",
6314                 dumpFrameCount,
6315                 type,
6316                 frameNumber);
6317          /* Consistent naming for Jpeg+meta+raw end */
6318         filePath.append(buf);
6319         int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
6320         if (file_fd >= 0) {
6321             ssize_t written_len = 0;
6322             meta.tuning_data_version = TUNING_DATA_VERSION;
6323             void *data = (void *)((uint8_t *)&meta.tuning_data_version);
6324             written_len += write(file_fd, data, sizeof(uint32_t));
6325             data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
6326             LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6327             written_len += write(file_fd, data, sizeof(uint32_t));
6328             data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
6329             LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6330             written_len += write(file_fd, data, sizeof(uint32_t));
6331             data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
6332             LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6333             written_len += write(file_fd, data, sizeof(uint32_t));
6334             data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
6335             LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6336             written_len += write(file_fd, data, sizeof(uint32_t));
6337             meta.tuning_mod3_data_size = 0;
6338             data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
6339             LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6340             written_len += write(file_fd, data, sizeof(uint32_t));
6341             size_t total_size = meta.tuning_sensor_data_size;
6342             data = (void *)((uint8_t *)&meta.data);
6343             written_len += write(file_fd, data, total_size);
6344             total_size = meta.tuning_vfe_data_size;
6345             data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
6346             written_len += write(file_fd, data, total_size);
6347             total_size = meta.tuning_cpp_data_size;
6348             data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
6349             written_len += write(file_fd, data, total_size);
6350             total_size = meta.tuning_cac_data_size;
6351             data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
6352             written_len += write(file_fd, data, total_size);
6353             close(file_fd);
6354         }else {
6355             LOGE("fail to open file for metadata dumping");
6356         }
6357     }
6358 }
6359 
6360 /*===========================================================================
6361  * FUNCTION   : cleanAndSortStreamInfo
6362  *
6363  * DESCRIPTION: helper method to clean up invalid streams in stream_info,
6364  *              and sort them such that raw stream is at the end of the list
6365  *              This is a workaround for camera daemon constraint.
6366  *
6367  * PARAMETERS : None
6368  *
6369  *==========================================================================*/
cleanAndSortStreamInfo()6370 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
6371 {
6372     List<stream_info_t *> newStreamInfo;
6373 
6374     /*clean up invalid streams*/
6375     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
6376             it != mStreamInfo.end();) {
6377         if(((*it)->status) == INVALID){
6378             QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
6379             delete channel;
6380             free(*it);
6381             it = mStreamInfo.erase(it);
6382         } else {
6383             it++;
6384         }
6385     }
6386 
6387     // Move preview/video/callback/snapshot streams into newList
6388     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6389             it != mStreamInfo.end();) {
6390         if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
6391                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
6392                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
6393             newStreamInfo.push_back(*it);
6394             it = mStreamInfo.erase(it);
6395         } else
6396             it++;
6397     }
6398     // Move raw streams into newList
6399     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6400             it != mStreamInfo.end();) {
6401         newStreamInfo.push_back(*it);
6402         it = mStreamInfo.erase(it);
6403     }
6404 
6405     mStreamInfo = newStreamInfo;
6406 }
6407 
6408 /*===========================================================================
6409  * FUNCTION   : extractJpegMetadata
6410  *
6411  * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
6412  *              JPEG metadata is cached in HAL, and return as part of capture
6413  *              result when metadata is returned from camera daemon.
6414  *
6415  * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
6416  *              @request:      capture request
6417  *
6418  *==========================================================================*/
extractJpegMetadata(CameraMetadata & jpegMetadata,const camera3_capture_request_t * request)6419 void QCamera3HardwareInterface::extractJpegMetadata(
6420         CameraMetadata& jpegMetadata,
6421         const camera3_capture_request_t *request)
6422 {
6423     CameraMetadata frame_settings;
6424     frame_settings = request->settings;
6425 
6426     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
6427         jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
6428                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
6429                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
6430 
6431     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
6432         jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
6433                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
6434                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
6435 
6436     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
6437         jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
6438                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
6439                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
6440 
6441     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
6442         jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
6443                 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
6444                 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
6445 
6446     if (frame_settings.exists(ANDROID_JPEG_QUALITY))
6447         jpegMetadata.update(ANDROID_JPEG_QUALITY,
6448                 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
6449                 frame_settings.find(ANDROID_JPEG_QUALITY).count);
6450 
6451     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
6452         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
6453                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
6454                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
6455 
6456     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
6457         int32_t thumbnail_size[2];
6458         thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
6459         thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
6460         if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
6461             int32_t orientation =
6462                   frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
6463             if ((orientation == 90) || (orientation == 270)) {
6464                //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
6465                int32_t temp;
6466                temp = thumbnail_size[0];
6467                thumbnail_size[0] = thumbnail_size[1];
6468                thumbnail_size[1] = temp;
6469             }
6470          }
6471          jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
6472                 thumbnail_size,
6473                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
6474     }
6475 
6476 }
6477 
6478 /*===========================================================================
6479  * FUNCTION   : convertToRegions
6480  *
6481  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
6482  *
6483  * PARAMETERS :
6484  *   @rect   : cam_rect_t struct to convert
6485  *   @region : int32_t destination array
6486  *   @weight : if we are converting from cam_area_t, weight is valid
6487  *             else weight = -1
6488  *
6489  *==========================================================================*/
convertToRegions(cam_rect_t rect,int32_t * region,int weight)6490 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
6491         int32_t *region, int weight)
6492 {
6493     region[0] = rect.left;
6494     region[1] = rect.top;
6495     region[2] = rect.left + rect.width;
6496     region[3] = rect.top + rect.height;
6497     if (weight > -1) {
6498         region[4] = weight;
6499     }
6500 }
6501 
/*===========================================================================
 * FUNCTION   : convertFromRegions
 *
 * DESCRIPTION: helper method to convert a five-element metadata region array
 *              [x_min, y_min, x_max, y_max, weight] into a cam_area_t
 *
 * PARAMETERS :
 *   @roi      : cam_area_t destination to fill
 *   @settings : capture request settings containing the region tag
 *   @tag      : metadata tag to read the region from
 *
 *==========================================================================*/
convertFromRegions(cam_area_t & roi,const camera_metadata_t * settings,uint32_t tag)6514 void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
6515         const camera_metadata_t *settings, uint32_t tag)
6516 {
6517     CameraMetadata frame_settings;
6518     frame_settings = settings;
6519     int32_t x_min = frame_settings.find(tag).data.i32[0];
6520     int32_t y_min = frame_settings.find(tag).data.i32[1];
6521     int32_t x_max = frame_settings.find(tag).data.i32[2];
6522     int32_t y_max = frame_settings.find(tag).data.i32[3];
6523     roi.weight = frame_settings.find(tag).data.i32[4];
6524     roi.rect.left = x_min;
6525     roi.rect.top = y_min;
6526     roi.rect.width = x_max - x_min;
6527     roi.rect.height = y_max - y_min;
6528 }
6529 
6530 /*===========================================================================
6531  * FUNCTION   : resetIfNeededROI
6532  *
6533  * DESCRIPTION: helper method to reset the roi if it is greater than scaler
6534  *              crop region
6535  *
6536  * PARAMETERS :
6537  *   @roi       : cam_area_t struct to resize
6538  *   @scalerCropRegion : cam_crop_region_t region to compare against
6539  *
6540  *
6541  *==========================================================================*/
resetIfNeededROI(cam_area_t * roi,const cam_crop_region_t * scalerCropRegion)6542 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
6543                                                  const cam_crop_region_t* scalerCropRegion)
6544 {
6545     int32_t roi_x_max = roi->rect.width + roi->rect.left;
6546     int32_t roi_y_max = roi->rect.height + roi->rect.top;
6547     int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
6548     int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
6549 
6550     /* According to spec weight = 0 is used to indicate roi needs to be disabled
6551      * without having this check the calculations below to validate if the roi
6552      * is inside scalar crop region will fail resulting in the roi not being
6553      * reset causing algorithm to continue to use stale roi window
6554      */
6555     if (roi->weight == 0) {
6556         return true;
6557     }
6558 
6559     if ((roi_x_max < scalerCropRegion->left) ||
6560         // right edge of roi window is left of scalar crop's left edge
6561         (roi_y_max < scalerCropRegion->top)  ||
6562         // bottom edge of roi window is above scalar crop's top edge
6563         (roi->rect.left > crop_x_max) ||
6564         // left edge of roi window is beyond(right) of scalar crop's right edge
6565         (roi->rect.top > crop_y_max)){
6566         // top edge of roi windo is above scalar crop's top edge
6567         return false;
6568     }
6569     if (roi->rect.left < scalerCropRegion->left) {
6570         roi->rect.left = scalerCropRegion->left;
6571     }
6572     if (roi->rect.top < scalerCropRegion->top) {
6573         roi->rect.top = scalerCropRegion->top;
6574     }
6575     if (roi_x_max > crop_x_max) {
6576         roi_x_max = crop_x_max;
6577     }
6578     if (roi_y_max > crop_y_max) {
6579         roi_y_max = crop_y_max;
6580     }
6581     roi->rect.width = roi_x_max - roi->rect.left;
6582     roi->rect.height = roi_y_max - roi->rect.top;
6583     return true;
6584 }
6585 
6586 /*===========================================================================
6587  * FUNCTION   : convertLandmarks
6588  *
6589  * DESCRIPTION: helper method to extract the landmarks from face detection info
6590  *
6591  * PARAMETERS :
6592  *   @landmark_data : input landmark data to be converted
6593  *   @landmarks : int32_t destination array
6594  *
6595  *
6596  *==========================================================================*/
convertLandmarks(cam_face_landmarks_info_t landmark_data,int32_t * landmarks)6597 void QCamera3HardwareInterface::convertLandmarks(
6598         cam_face_landmarks_info_t landmark_data,
6599         int32_t *landmarks)
6600 {
6601     landmarks[0] = (int32_t)landmark_data.left_eye_center.x;
6602     landmarks[1] = (int32_t)landmark_data.left_eye_center.y;
6603     landmarks[2] = (int32_t)landmark_data.right_eye_center.x;
6604     landmarks[3] = (int32_t)landmark_data.right_eye_center.y;
6605     landmarks[4] = (int32_t)landmark_data.mouth_center.x;
6606     landmarks[5] = (int32_t)landmark_data.mouth_center.y;
6607 }
6608 
6609 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
6610 /*===========================================================================
6611  * FUNCTION   : initCapabilities
6612  *
6613  * DESCRIPTION: initialize camera capabilities in static data struct
6614  *
6615  * PARAMETERS :
6616  *   @cameraId  : camera Id
6617  *
6618  * RETURN     : int32_t type of status
6619  *              NO_ERROR  -- success
6620  *              none-zero failure code
6621  *==========================================================================*/
initCapabilities(uint32_t cameraId)6622 int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
6623 {
6624     int rc = 0;
6625     mm_camera_vtbl_t *cameraHandle = NULL;
6626     QCamera3HeapMemory *capabilityHeap = NULL;
6627 
6628     rc = camera_open((uint8_t)cameraId, &cameraHandle);
6629     if (rc) {
6630         LOGE("camera_open failed. rc = %d", rc);
6631         goto open_failed;
6632     }
6633     if (!cameraHandle) {
6634         LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
6635         goto open_failed;
6636     }
6637 
6638     capabilityHeap = new QCamera3HeapMemory(1);
6639     if (capabilityHeap == NULL) {
6640         LOGE("creation of capabilityHeap failed");
6641         goto heap_creation_failed;
6642     }
6643     /* Allocate memory for capability buffer */
6644     rc = capabilityHeap->allocate(sizeof(cam_capability_t));
6645     if(rc != OK) {
6646         LOGE("No memory for cappability");
6647         goto allocate_failed;
6648     }
6649 
6650     /* Map memory for capability buffer */
6651     memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
6652     rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
6653                                 CAM_MAPPING_BUF_TYPE_CAPABILITY,
6654                                 capabilityHeap->getFd(0),
6655                                 sizeof(cam_capability_t),
6656                                 capabilityHeap->getPtr(0));
6657     if(rc < 0) {
6658         LOGE("failed to map capability buffer");
6659         goto map_failed;
6660     }
6661 
6662     /* Query Capability */
6663     rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
6664     if(rc < 0) {
6665         LOGE("failed to query capability");
6666         goto query_failed;
6667     }
6668     gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
6669     if (!gCamCapability[cameraId]) {
6670         LOGE("out of memory");
6671         goto query_failed;
6672     }
6673     memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
6674                                         sizeof(cam_capability_t));
6675 
6676     int index;
6677     for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
6678         cam_analysis_info_t *p_analysis_info =
6679                 &gCamCapability[cameraId]->analysis_info[index];
6680         p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
6681         p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
6682     }
6683     rc = 0;
6684 
6685 query_failed:
6686     cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
6687                             CAM_MAPPING_BUF_TYPE_CAPABILITY);
6688 map_failed:
6689     capabilityHeap->deallocate();
6690 allocate_failed:
6691     delete capabilityHeap;
6692 heap_creation_failed:
6693     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
6694     cameraHandle = NULL;
6695 open_failed:
6696     return rc;
6697 }
6698 
6699 /*==========================================================================
6700  * FUNCTION   : get3Aversion
6701  *
6702  * DESCRIPTION: get the Q3A S/W version
6703  *
6704  * PARAMETERS :
6705  *  @sw_version: Reference of Q3A structure which will hold version info upon
6706  *               return
6707  *
6708  * RETURN     : None
6709  *
6710  *==========================================================================*/
get3AVersion(cam_q3a_version_t & sw_version)6711 void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
6712 {
6713     if(gCamCapability[mCameraId])
6714         sw_version = gCamCapability[mCameraId]->q3a_version;
6715     else
6716         LOGE("Capability structure NULL!");
6717 }
6718 
6719 
6720 /*===========================================================================
6721  * FUNCTION   : initParameters
6722  *
6723  * DESCRIPTION: initialize camera parameters
6724  *
6725  * PARAMETERS :
6726  *
6727  * RETURN     : int32_t type of status
6728  *              NO_ERROR  -- success
6729  *              none-zero failure code
6730  *==========================================================================*/
initParameters()6731 int QCamera3HardwareInterface::initParameters()
6732 {
6733     int rc = 0;
6734 
6735     //Allocate Set Param Buffer
6736     mParamHeap = new QCamera3HeapMemory(1);
6737     rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
6738     if(rc != OK) {
6739         rc = NO_MEMORY;
6740         LOGE("Failed to allocate SETPARM Heap memory");
6741         delete mParamHeap;
6742         mParamHeap = NULL;
6743         return rc;
6744     }
6745 
6746     //Map memory for parameters buffer
6747     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
6748             CAM_MAPPING_BUF_TYPE_PARM_BUF,
6749             mParamHeap->getFd(0),
6750             sizeof(metadata_buffer_t),
6751             (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
6752     if(rc < 0) {
6753         LOGE("failed to map SETPARM buffer");
6754         rc = FAILED_TRANSACTION;
6755         mParamHeap->deallocate();
6756         delete mParamHeap;
6757         mParamHeap = NULL;
6758         return rc;
6759     }
6760 
6761     mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
6762 
6763     mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
6764     return rc;
6765 }
6766 
6767 /*===========================================================================
6768  * FUNCTION   : deinitParameters
6769  *
6770  * DESCRIPTION: de-initialize camera parameters
6771  *
6772  * PARAMETERS :
6773  *
6774  * RETURN     : NONE
6775  *==========================================================================*/
deinitParameters()6776 void QCamera3HardwareInterface::deinitParameters()
6777 {
6778     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
6779             CAM_MAPPING_BUF_TYPE_PARM_BUF);
6780 
6781     mParamHeap->deallocate();
6782     delete mParamHeap;
6783     mParamHeap = NULL;
6784 
6785     mParameters = NULL;
6786 
6787     free(mPrevParameters);
6788     mPrevParameters = NULL;
6789 }
6790 
6791 /*===========================================================================
6792  * FUNCTION   : calcMaxJpegSize
6793  *
6794  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
6795  *
6796  * PARAMETERS :
6797  *
6798  * RETURN     : max_jpeg_size
6799  *==========================================================================*/
calcMaxJpegSize(uint32_t camera_id)6800 size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
6801 {
6802     size_t max_jpeg_size = 0;
6803     size_t temp_width, temp_height;
6804     size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
6805             MAX_SIZES_CNT);
6806     for (size_t i = 0; i < count; i++) {
6807         temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
6808         temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
6809         if (temp_width * temp_height > max_jpeg_size ) {
6810             max_jpeg_size = temp_width * temp_height;
6811         }
6812     }
6813     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
6814     return max_jpeg_size;
6815 }
6816 
6817 /*===========================================================================
6818  * FUNCTION   : getMaxRawSize
6819  *
6820  * DESCRIPTION: Fetches maximum raw size supported by the cameraId
6821  *
6822  * PARAMETERS :
6823  *
6824  * RETURN     : Largest supported Raw Dimension
6825  *==========================================================================*/
getMaxRawSize(uint32_t camera_id)6826 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
6827 {
6828     int max_width = 0;
6829     cam_dimension_t maxRawSize;
6830 
6831     memset(&maxRawSize, 0, sizeof(cam_dimension_t));
6832     for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
6833         if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
6834             max_width = gCamCapability[camera_id]->raw_dim[i].width;
6835             maxRawSize = gCamCapability[camera_id]->raw_dim[i];
6836         }
6837     }
6838     return maxRawSize;
6839 }
6840 
6841 
6842 /*===========================================================================
6843  * FUNCTION   : calcMaxJpegDim
6844  *
6845  * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
6846  *
6847  * PARAMETERS :
6848  *
6849  * RETURN     : max_jpeg_dim
6850  *==========================================================================*/
calcMaxJpegDim()6851 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
6852 {
6853     cam_dimension_t max_jpeg_dim;
6854     cam_dimension_t curr_jpeg_dim;
6855     max_jpeg_dim.width = 0;
6856     max_jpeg_dim.height = 0;
6857     curr_jpeg_dim.width = 0;
6858     curr_jpeg_dim.height = 0;
6859     for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
6860         curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
6861         curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
6862         if (curr_jpeg_dim.width * curr_jpeg_dim.height >
6863             max_jpeg_dim.width * max_jpeg_dim.height ) {
6864             max_jpeg_dim.width = curr_jpeg_dim.width;
6865             max_jpeg_dim.height = curr_jpeg_dim.height;
6866         }
6867     }
6868     return max_jpeg_dim;
6869 }
6870 
6871 /*===========================================================================
6872  * FUNCTION   : addStreamConfig
6873  *
6874  * DESCRIPTION: adds the stream configuration to the array
6875  *
6876  * PARAMETERS :
6877  * @available_stream_configs : pointer to stream configuration array
6878  * @scalar_format            : scalar format
6879  * @dim                      : configuration dimension
6880  * @config_type              : input or output configuration type
6881  *
6882  * RETURN     : NONE
6883  *==========================================================================*/
addStreamConfig(Vector<int32_t> & available_stream_configs,int32_t scalar_format,const cam_dimension_t & dim,int32_t config_type)6884 void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
6885         int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
6886 {
6887     available_stream_configs.add(scalar_format);
6888     available_stream_configs.add(dim.width);
6889     available_stream_configs.add(dim.height);
6890     available_stream_configs.add(config_type);
6891 }
6892 
6893 /*===========================================================================
 * FUNCTION   : supportBurstCapture
6895  *
6896  * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
6897  *
6898  * PARAMETERS :
6899  *   @cameraId  : camera Id
6900  *
6901  * RETURN     : true if camera supports BURST_CAPTURE
6902  *              false otherwise
6903  *==========================================================================*/
supportBurstCapture(uint32_t cameraId)6904 bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
6905 {
6906     const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
6907     const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
6908     const int32_t highResWidth = 3264;
6909     const int32_t highResHeight = 2448;
6910 
6911     if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
6912         // Maximum resolution images cannot be captured at >= 10fps
6913         // -> not supporting BURST_CAPTURE
6914         return false;
6915     }
6916 
6917     if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
6918         // Maximum resolution images can be captured at >= 20fps
6919         // --> supporting BURST_CAPTURE
6920         return true;
6921     }
6922 
6923     // Find the smallest highRes resolution, or largest resolution if there is none
6924     size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
6925             MAX_SIZES_CNT);
6926     size_t highRes = 0;
6927     while ((highRes + 1 < totalCnt) &&
6928             (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
6929             gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
6930             highResWidth * highResHeight)) {
6931         highRes++;
6932     }
6933     if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
6934         return true;
6935     } else {
6936         return false;
6937     }
6938 }
6939 
6940 /*===========================================================================
6941  * FUNCTION   : initStaticMetadata
6942  *
6943  * DESCRIPTION: initialize the static metadata
6944  *
6945  * PARAMETERS :
6946  *   @cameraId  : camera Id
6947  *
6948  * RETURN     : int32_t type of status
6949  *              0  -- success
6950  *              non-zero failure code
6951  *==========================================================================*/
initStaticMetadata(uint32_t cameraId)6952 int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
6953 {
6954     int rc = 0;
6955     CameraMetadata staticInfo;
6956     size_t count = 0;
6957     bool limitedDevice = false;
6958     char prop[PROPERTY_VALUE_MAX];
6959     bool supportBurst = false;
6960 
6961     supportBurst = supportBurstCapture(cameraId);
6962 
6963     /* If sensor is YUV sensor (no raw support) or if per-frame control is not
6964      * guaranteed or if min fps of max resolution is less than 20 fps, its
6965      * advertised as limited device*/
6966     limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
6967             (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
6968             (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
6969             !supportBurst;
6970 
6971     uint8_t supportedHwLvl = limitedDevice ?
6972             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
6973             // LEVEL_3 - This device will support level 3.
6974             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
6975 
6976     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
6977             &supportedHwLvl, 1);
6978 
6979     bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
6980     /*HAL 3 only*/
6981     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6982                     &gCamCapability[cameraId]->min_focus_distance, 1);
6983 
6984     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
6985                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
6986 
6987     /*should be using focal lengths but sensor doesn't provide that info now*/
6988     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6989                       &gCamCapability[cameraId]->focal_length,
6990                       1);
6991 
6992     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6993             gCamCapability[cameraId]->apertures,
6994             MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
6995 
6996     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6997             gCamCapability[cameraId]->filter_densities,
6998             MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
6999 
7000 
7001     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7002             (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
7003             MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
7004 
7005     int32_t lens_shading_map_size[] = {
7006             MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
7007             MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
7008     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
7009                       lens_shading_map_size,
7010                       sizeof(lens_shading_map_size)/sizeof(int32_t));
7011 
7012     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
7013             gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
7014 
7015     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
7016             gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
7017 
7018     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7019             &gCamCapability[cameraId]->max_frame_duration, 1);
7020 
7021     camera_metadata_rational baseGainFactor = {
7022             gCamCapability[cameraId]->base_gain_factor.numerator,
7023             gCamCapability[cameraId]->base_gain_factor.denominator};
7024     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
7025                       &baseGainFactor, 1);
7026 
7027     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7028                      (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
7029 
7030     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
7031             gCamCapability[cameraId]->pixel_array_size.height};
7032     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7033                       pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
7034 
7035     int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
7036             gCamCapability[cameraId]->active_array_size.top,
7037             gCamCapability[cameraId]->active_array_size.width,
7038             gCamCapability[cameraId]->active_array_size.height};
7039     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7040             active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
7041 
7042     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
7043             &gCamCapability[cameraId]->white_level, 1);
7044 
7045     int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
7046     adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
7047             gCamCapability[cameraId]->color_arrangement);
7048     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
7049             adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
7050 
7051     bool hasBlackRegions = false;
7052     if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
7053         LOGW("black_region_count: %d is bounded to %d",
7054             gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
7055         gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
7056     }
7057     if (gCamCapability[cameraId]->optical_black_region_count != 0) {
7058         int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
7059         for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
7060             opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
7061         }
7062         staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
7063                 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
7064         hasBlackRegions = true;
7065     }
7066 
7067     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
7068             &gCamCapability[cameraId]->flash_charge_duration, 1);
7069 
7070     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
7071             &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
7072 
7073     uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
7074             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
7075             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
7076     staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7077             &timestampSource, 1);
7078 
7079     staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7080             &gCamCapability[cameraId]->histogram_size, 1);
7081 
7082     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7083             &gCamCapability[cameraId]->max_histogram_count, 1);
7084 
7085     int32_t sharpness_map_size[] = {
7086             gCamCapability[cameraId]->sharpness_map_size.width,
7087             gCamCapability[cameraId]->sharpness_map_size.height};
7088 
7089     staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
7090             sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
7091 
7092     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7093             &gCamCapability[cameraId]->max_sharpness_map_value, 1);
7094 
7095     int32_t scalar_formats[] = {
7096             ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
7097             ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
7098             ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
7099             ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
7100             HAL_PIXEL_FORMAT_RAW10,
7101             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
7102     size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
7103     staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
7104                       scalar_formats,
7105                       scalar_formats_count);
7106 
7107     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
7108     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
7109     makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
7110             count, MAX_SIZES_CNT, available_processed_sizes);
7111     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
7112             available_processed_sizes, count * 2);
7113 
7114     int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
7115     count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
7116     makeTable(gCamCapability[cameraId]->raw_dim,
7117             count, MAX_SIZES_CNT, available_raw_sizes);
7118     staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
7119             available_raw_sizes, count * 2);
7120 
7121     int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
7122     count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
7123     makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
7124             count, MAX_SIZES_CNT, available_fps_ranges);
7125     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7126             available_fps_ranges, count * 2);
7127 
7128     camera_metadata_rational exposureCompensationStep = {
7129             gCamCapability[cameraId]->exp_compensation_step.numerator,
7130             gCamCapability[cameraId]->exp_compensation_step.denominator};
7131     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
7132                       &exposureCompensationStep, 1);
7133 
7134     Vector<uint8_t> availableVstabModes;
7135     availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
7136     char eis_prop[PROPERTY_VALUE_MAX];
7137     memset(eis_prop, 0, sizeof(eis_prop));
7138     property_get("persist.camera.eis.enable", eis_prop, "0");
7139     uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
7140     if (facingBack && eis_prop_set) {
7141         availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
7142     }
7143     staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7144                       availableVstabModes.array(), availableVstabModes.size());
7145 
7146     /*HAL 1 and HAL 3 common*/
7147     float maxZoom = 4;
7148     staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7149             &maxZoom, 1);
7150 
7151     uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
7152     staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
7153 
7154     int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
7155     if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
7156         max3aRegions[2] = 0; /* AF not supported */
7157     staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
7158             max3aRegions, 3);
7159 
7160     /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
7161     memset(prop, 0, sizeof(prop));
7162     property_get("persist.camera.facedetect", prop, "1");
7163     uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
7164     LOGD("Support face detection mode: %d",
7165              supportedFaceDetectMode);
7166 
7167     int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
7168     Vector<uint8_t> availableFaceDetectModes;
7169     availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
7170     if (supportedFaceDetectMode == 1) {
7171         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
7172     } else if (supportedFaceDetectMode == 2) {
7173         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
7174     } else if (supportedFaceDetectMode == 3) {
7175         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
7176         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
7177     } else {
7178         maxFaces = 0;
7179     }
7180     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7181             availableFaceDetectModes.array(),
7182             availableFaceDetectModes.size());
7183     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
7184             (int32_t *)&maxFaces, 1);
7185 
7186     int32_t exposureCompensationRange[] = {
7187             gCamCapability[cameraId]->exposure_compensation_min,
7188             gCamCapability[cameraId]->exposure_compensation_max};
7189     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
7190             exposureCompensationRange,
7191             sizeof(exposureCompensationRange)/sizeof(int32_t));
7192 
7193     uint8_t lensFacing = (facingBack) ?
7194             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
7195     staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
7196 
7197     staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7198                       available_thumbnail_sizes,
7199                       sizeof(available_thumbnail_sizes)/sizeof(int32_t));
7200 
7201     /*all sizes will be clubbed into this tag*/
7202     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
7203     /*android.scaler.availableStreamConfigurations*/
7204     Vector<int32_t> available_stream_configs;
7205     cam_dimension_t active_array_dim;
7206     active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
7207     active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
7208     /* Add input/output stream configurations for each scalar formats*/
7209     for (size_t j = 0; j < scalar_formats_count; j++) {
7210         switch (scalar_formats[j]) {
7211         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
7212         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
7213         case HAL_PIXEL_FORMAT_RAW10:
7214             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7215                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7216                 addStreamConfig(available_stream_configs, scalar_formats[j],
7217                         gCamCapability[cameraId]->raw_dim[i],
7218                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7219             }
7220             break;
7221         case HAL_PIXEL_FORMAT_BLOB:
7222             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7223                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7224                 addStreamConfig(available_stream_configs, scalar_formats[j],
7225                         gCamCapability[cameraId]->picture_sizes_tbl[i],
7226                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7227             }
7228             break;
7229         case HAL_PIXEL_FORMAT_YCbCr_420_888:
7230         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
7231         default:
7232             cam_dimension_t largest_picture_size;
7233             memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
7234             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7235                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7236                 addStreamConfig(available_stream_configs, scalar_formats[j],
7237                         gCamCapability[cameraId]->picture_sizes_tbl[i],
7238                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7239                 /* Book keep largest */
7240                 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
7241                         >= largest_picture_size.width &&
7242                         gCamCapability[cameraId]->picture_sizes_tbl[i].height
7243                         >= largest_picture_size.height)
7244                     largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
7245             }
7246             /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
7247             if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
7248                     scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
7249                  addStreamConfig(available_stream_configs, scalar_formats[j],
7250                          largest_picture_size,
7251                          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
7252             }
7253             break;
7254         }
7255     }
7256 
7257     staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7258                       available_stream_configs.array(), available_stream_configs.size());
7259     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7260     staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7261 
7262     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7263     staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7264 
7265     /* android.scaler.availableMinFrameDurations */
7266     Vector<int64_t> available_min_durations;
7267     for (size_t j = 0; j < scalar_formats_count; j++) {
7268         switch (scalar_formats[j]) {
7269         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
7270         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
7271         case HAL_PIXEL_FORMAT_RAW10:
7272             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7273                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7274                 available_min_durations.add(scalar_formats[j]);
7275                 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7276                 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7277                 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
7278             }
7279             break;
7280         default:
7281             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7282                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7283                 available_min_durations.add(scalar_formats[j]);
7284                 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7285                 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7286                 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
7287             }
7288             break;
7289         }
7290     }
7291     staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
7292                       available_min_durations.array(), available_min_durations.size());
7293 
7294     Vector<int32_t> available_hfr_configs;
7295     for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
7296         int32_t fps = 0;
7297         switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
7298         case CAM_HFR_MODE_60FPS:
7299             fps = 60;
7300             break;
7301         case CAM_HFR_MODE_90FPS:
7302             fps = 90;
7303             break;
7304         case CAM_HFR_MODE_120FPS:
7305             fps = 120;
7306             break;
7307         case CAM_HFR_MODE_150FPS:
7308             fps = 150;
7309             break;
7310         case CAM_HFR_MODE_180FPS:
7311             fps = 180;
7312             break;
7313         case CAM_HFR_MODE_210FPS:
7314             fps = 210;
7315             break;
7316         case CAM_HFR_MODE_240FPS:
7317             fps = 240;
7318             break;
7319         case CAM_HFR_MODE_480FPS:
7320             fps = 480;
7321             break;
7322         case CAM_HFR_MODE_OFF:
7323         case CAM_HFR_MODE_MAX:
7324         default:
7325             break;
7326         }
7327 
7328         /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
7329         if (fps >= MIN_FPS_FOR_BATCH_MODE) {
7330             /* For each HFR frame rate, need to advertise one variable fps range
7331              * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
7332              * and [120, 120]. While camcorder preview alone is running [30, 120] is
7333              * set by the app. When video recording is started, [120, 120] is
7334              * set. This way sensor configuration does not change when recording
7335              * is started */
7336 
7337             /* (width, height, fps_min, fps_max, batch_size_max) */
7338             for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
7339                 j < MAX_SIZES_CNT; j++) {
7340                 available_hfr_configs.add(
7341                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
7342                 available_hfr_configs.add(
7343                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
7344                 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
7345                 available_hfr_configs.add(fps);
7346                 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
7347 
7348                 /* (width, height, fps_min, fps_max, batch_size_max) */
7349                 available_hfr_configs.add(
7350                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
7351                 available_hfr_configs.add(
7352                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
7353                 available_hfr_configs.add(fps);
7354                 available_hfr_configs.add(fps);
7355                 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
7356             }
7357        }
7358     }
7359     //Advertise HFR capability only if the property is set
7360     memset(prop, 0, sizeof(prop));
7361     property_get("persist.camera.hal3hfr.enable", prop, "1");
7362     uint8_t hfrEnable = (uint8_t)atoi(prop);
7363 
7364     if(hfrEnable && available_hfr_configs.array()) {
7365         staticInfo.update(
7366                 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
7367                 available_hfr_configs.array(), available_hfr_configs.size());
7368     }
7369 
7370     int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
7371     staticInfo.update(ANDROID_JPEG_MAX_SIZE,
7372                       &max_jpeg_size, 1);
7373 
7374     uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
7375     size_t size = 0;
7376     count = CAM_EFFECT_MODE_MAX;
7377     count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
7378     for (size_t i = 0; i < count; i++) {
7379         int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7380                 gCamCapability[cameraId]->supported_effects[i]);
7381         if (NAME_NOT_FOUND != val) {
7382             avail_effects[size] = (uint8_t)val;
7383             size++;
7384         }
7385     }
7386     staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
7387                       avail_effects,
7388                       size);
7389 
7390     uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
7391     uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
7392     size_t supported_scene_modes_cnt = 0;
7393     count = CAM_SCENE_MODE_MAX;
7394     count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
7395     for (size_t i = 0; i < count; i++) {
7396         if (gCamCapability[cameraId]->supported_scene_modes[i] !=
7397                 CAM_SCENE_MODE_OFF) {
7398             int val = lookupFwkName(SCENE_MODES_MAP,
7399                     METADATA_MAP_SIZE(SCENE_MODES_MAP),
7400                     gCamCapability[cameraId]->supported_scene_modes[i]);
7401             if (NAME_NOT_FOUND != val) {
7402                 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
7403                 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
7404                 supported_scene_modes_cnt++;
7405             }
7406         }
7407     }
7408     staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
7409                       avail_scene_modes,
7410                       supported_scene_modes_cnt);
7411 
7412     uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
7413     makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
7414                       supported_scene_modes_cnt,
7415                       CAM_SCENE_MODE_MAX,
7416                       scene_mode_overrides,
7417                       supported_indexes,
7418                       cameraId);
7419 
7420     if (supported_scene_modes_cnt == 0) {
7421         supported_scene_modes_cnt = 1;
7422         avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
7423     }
7424 
7425     staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
7426             scene_mode_overrides, supported_scene_modes_cnt * 3);
7427 
7428     uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
7429                                          ANDROID_CONTROL_MODE_AUTO,
7430                                          ANDROID_CONTROL_MODE_USE_SCENE_MODE};
7431     staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
7432             available_control_modes,
7433             3);
7434 
7435     uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
7436     size = 0;
7437     count = CAM_ANTIBANDING_MODE_MAX;
7438     count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
7439     for (size_t i = 0; i < count; i++) {
7440         int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
7441                 gCamCapability[cameraId]->supported_antibandings[i]);
7442         if (NAME_NOT_FOUND != val) {
7443             avail_antibanding_modes[size] = (uint8_t)val;
7444             size++;
7445         }
7446 
7447     }
7448     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
7449                       avail_antibanding_modes,
7450                       size);
7451 
7452     uint8_t avail_abberation_modes[] = {
7453             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
7454             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
7455             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
7456     count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
7457     count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
7458     if (0 == count) {
7459         //  If no aberration correction modes are available for a device, this advertise OFF mode
7460         size = 1;
7461     } else {
7462         // If count is not zero then atleast one among the FAST or HIGH quality is supported
7463         // So, advertize all 3 modes if atleast any one mode is supported as per the
7464         // new M requirement
7465         size = 3;
7466     }
7467     staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
7468             avail_abberation_modes,
7469             size);
7470 
7471     uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
7472     size = 0;
7473     count = CAM_FOCUS_MODE_MAX;
7474     count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
7475     for (size_t i = 0; i < count; i++) {
7476         int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7477                 gCamCapability[cameraId]->supported_focus_modes[i]);
7478         if (NAME_NOT_FOUND != val) {
7479             avail_af_modes[size] = (uint8_t)val;
7480             size++;
7481         }
7482     }
7483     staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
7484                       avail_af_modes,
7485                       size);
7486 
7487     uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
7488     size = 0;
7489     count = CAM_WB_MODE_MAX;
7490     count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
7491     for (size_t i = 0; i < count; i++) {
7492         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7493                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7494                 gCamCapability[cameraId]->supported_white_balances[i]);
7495         if (NAME_NOT_FOUND != val) {
7496             avail_awb_modes[size] = (uint8_t)val;
7497             size++;
7498         }
7499     }
7500     staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
7501                       avail_awb_modes,
7502                       size);
7503 
7504     uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
7505     count = CAM_FLASH_FIRING_LEVEL_MAX;
7506     count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
7507             count);
7508     for (size_t i = 0; i < count; i++) {
7509         available_flash_levels[i] =
7510                 gCamCapability[cameraId]->supported_firing_levels[i];
7511     }
7512     staticInfo.update(ANDROID_FLASH_FIRING_POWER,
7513             available_flash_levels, count);
7514 
7515     uint8_t flashAvailable;
7516     if (gCamCapability[cameraId]->flash_available)
7517         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
7518     else
7519         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
7520     staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
7521             &flashAvailable, 1);
7522 
    // Supported AE modes come from the backend; when a flash unit is present,
    // the flash-driven AE modes are appended as well.
    Vector<uint8_t> avail_ae_modes;
    count = CAM_AE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
    }
    if (flashAvailable) {
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes.array(),
                      avail_ae_modes.size());
7536 
    // Overall sensitivity (ISO) range supported by the sensor.
    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    // Highest sensitivity achievable through analog gain alone.
    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    // Sensor mount angle doubles as the clockwise rotation needed to make the
    // output image upright in the device's native orientation.
    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);
7552 
    // Maximum simultaneous output streams by category:
    // stalling (e.g. JPEG), processed (YUV), and RAW.
    int32_t max_output_streams[] = {
            MAX_STALLING_STREAMS,
            MAX_PROCESSED_STREAMS,
            MAX_RAW_STREAMS};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            max_output_streams,
            sizeof(max_output_streams)/sizeof(max_output_streams[0]));

    // No app-visible LEDs: the tag is deliberately published with a count of 0.
    uint8_t avail_leds = 0;
    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
                      &avail_leds, 0);
7564 
    // Report focus-distance calibration quality only when the backend value
    // maps to a framework enum. NOTE: 'val' is declared at function scope here
    // and is reused for the reference-illuminant lookups further below.
    uint8_t focus_dist_calibrated;
    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
            gCamCapability[cameraId]->focus_dist_calibrated);
    if (NAME_NOT_FOUND != val) {
        focus_dist_calibrated = (uint8_t)val;
        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                     &focus_dist_calibrated, 1);
    }
7573 
7574     int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
7575     size = 0;
7576     count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
7577             MAX_TEST_PATTERN_CNT);
7578     for (size_t i = 0; i < count; i++) {
7579         int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
7580                 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
7581         if (NAME_NOT_FOUND != testpatternMode) {
7582             avail_testpattern_modes[size] = testpatternMode;
7583             size++;
7584         }
7585     }
7586     staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7587                       avail_testpattern_modes,
7588                       size);
7589 
    // Worst-case number of frames a request can be in flight: concurrent
    // in-flight requests plus pipeline fill and frame-skip latency.
    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
                      &max_pipeline_depth,
                      1);

    // Number of partial result callbacks delivered per capture result.
    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
                      &partial_result_count,
                       1);

    // Maximum additional stall (in frames) a reprocess capture may introduce.
    int32_t max_stall_duration = MAX_REPROCESS_STALL;
    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
7602 
    // Device capabilities. BACKWARD_COMPATIBLE, MANUAL_SENSOR,
    // MANUAL_POST_PROCESSING, READ_SENSOR_SETTINGS and the reprocessing
    // capabilities are unconditional; the rest depend on flags computed
    // earlier in this function.
    Vector<uint8_t> available_capabilities;
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
    if (supportBurst) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
    }
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    // Constrained high-speed video only when HFR is enabled and at least one
    // HFR configuration was collected above.
    if (hfrEnable && available_hfr_configs.array()) {
        available_capabilities.add(
                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
    }

    // RAW capability is advertised for non-YUV (bayer) sensors only.
    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            available_capabilities.array(),
            available_capabilities.size());
7624 
7625     //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
7626     //Assumption is that all bayer cameras support MANUAL_SENSOR.
7627     uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
7628             ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
7629 
7630     staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7631             &aeLockAvailable, 1);
7632 
7633     //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
7634     //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
7635     uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
7636             ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
7637 
7638     staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7639             &awbLockAvailable, 1);
7640 
    // At most one reprocess input stream is supported.
    int32_t max_input_streams = 1;
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
                      &max_input_streams,
                      1);

    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
            HAL_PIXEL_FORMAT_YCbCr_420_888};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    // Settings are synchronized to frames with per-frame-control latency.
    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
                      &max_latency,
                      1);

    // Range of the post-RAW (ISP) sensitivity boost.
    int32_t isp_sensitivity_range[2];
    isp_sensitivity_range[0] =
        gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
    isp_sensitivity_range[1] =
        gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
                      isp_sensitivity_range,
                      sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
7667 
7668     uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
7669                                            ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
7670     staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7671             available_hot_pixel_modes,
7672             sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
7673 
7674     uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
7675                                          ANDROID_SHADING_MODE_FAST,
7676                                          ANDROID_SHADING_MODE_HIGH_QUALITY};
7677     staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
7678                       available_shading_modes,
7679                       3);
7680 
7681     uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
7682                                                   ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
7683     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7684                       available_lens_shading_map_modes,
7685                       2);
7686 
    // Edge enhancement modes supported in capture requests.
    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
                                      ANDROID_EDGE_MODE_FAST,
                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
            available_edge_modes,
            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));

    // Noise reduction modes supported in capture requests.
    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            available_noise_red_modes,
            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));

    // Tonemap curve modes supported in capture requests.
    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
                                         ANDROID_TONEMAP_MODE_FAST,
                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
            available_tonemap_modes,
            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));

    // Hot pixel map generation is not supported — OFF only.
    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
            available_hot_pixel_map_modes,
            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
7715 
7716     val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
7717             gCamCapability[cameraId]->reference_illuminant1);
7718     if (NAME_NOT_FOUND != val) {
7719         uint8_t fwkReferenceIlluminant = (uint8_t)val;
7720         staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
7721     }
7722 
7723     val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
7724             gCamCapability[cameraId]->reference_illuminant2);
7725     if (NAME_NOT_FOUND != val) {
7726         uint8_t fwkReferenceIlluminant = (uint8_t)val;
7727         staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
7728     }
7729 
    // DNG color calibration matrices from the sensor characterization data.
    // NOTE(review): the cast goes through void*, presumably because the
    // capability struct stores the rationals in an equivalent but distinct
    // type from camera_metadata_rational_t — confirm the layouts match.
    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix1,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix2,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform1,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform2,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform1,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform2,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7753 
    // Keys accepted in capture requests. The basic set below is extended with
    // AF regions when the device has more than one focus mode.
    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_JPEG_GPS_COORDINATES,
       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
       ANDROID_STATISTICS_FACE_DETECT_MODE,
       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
       /* DevCamDebug metadata request_keys_basic */
       DEVCAMDEBUG_META_ENABLE,
       /* DevCamDebug metadata end */
       };

    size_t request_keys_cnt =
            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
    Vector<int32_t> available_request_keys;
    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
    // AF regions are only meaningful when more than one focus mode exists.
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
            available_request_keys.array(), available_request_keys.size());
7799 
    // Keys that can appear in capture results. The basic set is extended below
    // based on focus modes, sensor type, face-detect support and black regions.
    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
       ANDROID_STATISTICS_FACE_SCORES,
       NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
       NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
       // DevCamDebug metadata result_keys_basic
       DEVCAMDEBUG_META_ENABLE,
       // DevCamDebug metadata result_keys AF
       DEVCAMDEBUG_AF_LENS_POSITION,
       DEVCAMDEBUG_AF_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_TOF_DISTANCE,
       DEVCAMDEBUG_AF_LUMA,
       DEVCAMDEBUG_AF_HAF_STATE,
       DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
       DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
       DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
       DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
       DEVCAMDEBUG_AF_MONITOR_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
       DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
       DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
       // DevCamDebug metadata result_keys AEC
       DEVCAMDEBUG_AEC_TARGET_LUMA,
       DEVCAMDEBUG_AEC_COMP_LUMA,
       DEVCAMDEBUG_AEC_AVG_LUMA,
       DEVCAMDEBUG_AEC_CUR_LUMA,
       DEVCAMDEBUG_AEC_LINECOUNT,
       DEVCAMDEBUG_AEC_REAL_GAIN,
       DEVCAMDEBUG_AEC_EXP_INDEX,
       DEVCAMDEBUG_AEC_LUX_IDX,
       // DevCamDebug metadata result_keys AWB
       DEVCAMDEBUG_AWB_R_GAIN,
       DEVCAMDEBUG_AWB_G_GAIN,
       DEVCAMDEBUG_AWB_B_GAIN,
       DEVCAMDEBUG_AWB_CCT,
       DEVCAMDEBUG_AWB_DECISION,
       /* DevCamDebug metadata end */
       };
    size_t result_keys_cnt =
            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);

    Vector<int32_t> available_result_keys;
    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
    // AF regions are only reported when more than one focus mode exists.
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }
    // Noise profile and green split apply to bayer (RAW) sensors only.
    if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
    }
    // Face-detect result keys depend on the supported face detect mode:
    // mode 1 adds rectangles + scores; modes 2/3 add ids + landmarks.
    if (supportedFaceDetectMode == 1) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
    } else if ((supportedFaceDetectMode == 2) ||
            (supportedFaceDetectMode == 3)) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
    }
    // Dynamic black/white levels are only reported when the sensor exposes
    // optical black regions.
    if (hasBlackRegions) {
        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
            available_result_keys.array(), available_result_keys.size());
7898 
    // Static characteristics keys published by this HAL; extended below with
    // the optical-black-regions key when the sensor provides them.
    int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
       ANDROID_SCALER_CROPPING_TYPE,
       ANDROID_SYNC_MAX_LATENCY,
       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
       ANDROID_LENS_FACING,
       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
       ANDROID_TONEMAP_MAX_CURVE_POINTS,
       ANDROID_CONTROL_AVAILABLE_MODES,
       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
       ANDROID_SHADING_AVAILABLE_MODES,
       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
       ANDROID_SENSOR_OPAQUE_RAW_SIZE };

    Vector<int32_t> available_characteristics_keys;
    available_characteristics_keys.appendArray(characteristics_keys_basic,
            sizeof(characteristics_keys_basic)/sizeof(int32_t));
    // Optical black regions are only advertised when the sensor has them.
    if (hasBlackRegions) {
        available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
                      available_characteristics_keys.array(),
                      available_characteristics_keys.size());
7966 
    /*available stall durations depend on the hw + sw and will be different for different devices */
    /*have to add for raw after implementation*/
    // Only BLOB (JPEG) and RAW16 are stalling formats on this HAL.
    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);

    // Each entry is a (format, width, height, duration) quadruple. BLOB uses
    // the picture-size table and JPEG stall durations; RAW16 uses the raw
    // dimension table and its own stall durations.
    Vector<int64_t> available_stall_durations;
    for (uint32_t j = 0; j < stall_formats_count; j++) {
        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                available_stall_durations.add(stall_formats[j]);
                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
                available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
          }
        } else {
            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                available_stall_durations.add(stall_formats[j]);
                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
                available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
            }
        }
    }
    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
                      available_stall_durations.array(),
                      available_stall_durations.size());
7995 
    //QCAMERA3_OPAQUE_RAW
    // Choose the opaque RAW pixel format: the family (legacy QCOM vs MIPI
    // packed) comes from the backend's opaque_raw_fmt, the bit depth from the
    // sensor white level. 'fmt' is also used below for stride and frame-length
    // calculations.
    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
    case LEGACY_RAW:
        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
        break;
    case MIPI_RAW:
        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
        break;
    default:
        // Unknown family: keep the defaults initialized above.
        LOGE("unknown opaque_raw_format %d",
                gCamCapability[cameraId]->opaque_raw_fmt);
        break;
    }
    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
8024 
8025     Vector<int32_t> strides;
8026     for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8027             gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8028         cam_stream_buf_plane_info_t buf_planes;
8029         strides.add(gCamCapability[cameraId]->raw_dim[i].width);
8030         strides.add(gCamCapability[cameraId]->raw_dim[i].height);
8031         mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8032             &gCamCapability[cameraId]->padding_info, &buf_planes);
8033         strides.add(buf_planes.plane_info.mp[0].stride);
8034     }
8035     staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
8036             strides.size());
8037 
8038     Vector<int32_t> opaque_size;
8039     for (size_t j = 0; j < scalar_formats_count; j++) {
8040         if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
8041             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8042                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8043                 cam_stream_buf_plane_info_t buf_planes;
8044 
8045                 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8046                          &gCamCapability[cameraId]->padding_info, &buf_planes);
8047 
8048                 if (rc == 0) {
8049                     opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
8050                     opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
8051                     opaque_size.add(buf_planes.plane_info.frame_len);
8052                 }else {
8053                     LOGE("raw frame calculation failed!");
8054                 }
8055             }
8056         }
8057     }
8058 
8059     if ((opaque_size.size() > 0) &&
8060             (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
8061         staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
8062     else
8063         LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
8064 
8065     gStaticMetadata[cameraId] = staticInfo.release();
8066     return rc;
8067 }
8068 
8069 /*===========================================================================
8070  * FUNCTION   : makeTable
8071  *
8072  * DESCRIPTION: make a table of sizes
8073  *
 * PARAMETERS :
 *   @dimTable  : array of dimensions to copy from
 *   @size      : number of entries in dimTable
 *   @max_size  : maximum number of entries to emit
 *   @sizeTable : output array receiving flattened (width, height) pairs
 *
8077  *==========================================================================*/
makeTable(cam_dimension_t * dimTable,size_t size,size_t max_size,int32_t * sizeTable)8078 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
8079         size_t max_size, int32_t *sizeTable)
8080 {
8081     size_t j = 0;
8082     if (size > max_size) {
8083        size = max_size;
8084     }
8085     for (size_t i = 0; i < size; i++) {
8086         sizeTable[j] = dimTable[i].width;
8087         sizeTable[j+1] = dimTable[i].height;
8088         j+=2;
8089     }
8090 }
8091 
8092 /*===========================================================================
8093  * FUNCTION   : makeFPSTable
8094  *
8095  * DESCRIPTION: make a table of fps ranges
8096  *
 * PARAMETERS :
 *   @fpsTable       : array of fps ranges to copy from
 *   @size           : number of entries in fpsTable
 *   @max_size       : maximum number of range pairs to emit
 *   @fpsRangesTable : output array receiving flattened (min_fps, max_fps) pairs
 *
8099  *==========================================================================*/
makeFPSTable(cam_fps_range_t * fpsTable,size_t size,size_t max_size,int32_t * fpsRangesTable)8100 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
8101         size_t max_size, int32_t *fpsRangesTable)
8102 {
8103     size_t j = 0;
8104     if (size > max_size) {
8105        size = max_size;
8106     }
8107     for (size_t i = 0; i < size; i++) {
8108         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
8109         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
8110         j+=2;
8111     }
8112 }
8113 
8114 /*===========================================================================
8115  * FUNCTION   : makeOverridesList
8116  *
8117  * DESCRIPTION: make a list of scene mode overrides
8118  *
8119  * PARAMETERS :
8120  *
8121  *
8122  *==========================================================================*/
makeOverridesList(cam_scene_mode_overrides_t * overridesTable,size_t size,size_t max_size,uint8_t * overridesList,uint8_t * supported_indexes,uint32_t camera_id)8123 void QCamera3HardwareInterface::makeOverridesList(
8124         cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
8125         uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
8126 {
8127     /*daemon will give a list of overrides for all scene modes.
8128       However we should send the fwk only the overrides for the scene modes
8129       supported by the framework*/
8130     size_t j = 0;
8131     if (size > max_size) {
8132        size = max_size;
8133     }
8134     size_t focus_count = CAM_FOCUS_MODE_MAX;
8135     focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
8136             focus_count);
8137     for (size_t i = 0; i < size; i++) {
8138         bool supt = false;
8139         size_t index = supported_indexes[i];
8140         overridesList[j] = gCamCapability[camera_id]->flash_available ?
8141                 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
8142         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8143                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8144                 overridesTable[index].awb_mode);
8145         if (NAME_NOT_FOUND != val) {
8146             overridesList[j+1] = (uint8_t)val;
8147         }
8148         uint8_t focus_override = overridesTable[index].af_mode;
8149         for (size_t k = 0; k < focus_count; k++) {
8150            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
8151               supt = true;
8152               break;
8153            }
8154         }
8155         if (supt) {
8156             val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8157                     focus_override);
8158             if (NAME_NOT_FOUND != val) {
8159                 overridesList[j+2] = (uint8_t)val;
8160             }
8161         } else {
8162            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
8163         }
8164         j+=3;
8165     }
8166 }
8167 
8168 /*===========================================================================
8169  * FUNCTION   : filterJpegSizes
8170  *
8171  * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
8172  *              could be downscaled to
8173  *
 * PARAMETERS :
 *   @jpegSizes         : output array receiving the filtered (w, h) pairs
 *   @processedSizes    : input array of flattened (w, h) pairs
 *   @processedSizesCnt : number of int32 entries in processedSizes
 *   @maxCount          : maximum number of int32 entries to consume
 *   @active_array_size : sensor active array, used to derive the minimum size
 *   @downscale_factor  : maximum allowed downscale factor (0 treated as 1)
 *
8176  * RETURN     : length of jpegSizes array
8177  *==========================================================================*/
8178 
filterJpegSizes(int32_t * jpegSizes,int32_t * processedSizes,size_t processedSizesCnt,size_t maxCount,cam_rect_t active_array_size,uint8_t downscale_factor)8179 size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
8180         size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
8181         uint8_t downscale_factor)
8182 {
8183     if (0 == downscale_factor) {
8184         downscale_factor = 1;
8185     }
8186 
8187     int32_t min_width = active_array_size.width / downscale_factor;
8188     int32_t min_height = active_array_size.height / downscale_factor;
8189     size_t jpegSizesCnt = 0;
8190     if (processedSizesCnt > maxCount) {
8191         processedSizesCnt = maxCount;
8192     }
8193     for (size_t i = 0; i < processedSizesCnt; i+=2) {
8194         if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
8195             jpegSizes[jpegSizesCnt] = processedSizes[i];
8196             jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
8197             jpegSizesCnt += 2;
8198         }
8199     }
8200     return jpegSizesCnt;
8201 }
8202 
8203 /*===========================================================================
8204  * FUNCTION   : computeNoiseModelEntryS
8205  *
8206  * DESCRIPTION: function to map a given sensitivity to the S noise
8207  *              model parameters in the DNG noise model.
8208  *
8209  * PARAMETERS : sens : the sensor sensitivity
8210  *
8211  ** RETURN    : S (sensor amplification) noise
8212  *
8213  *==========================================================================*/
computeNoiseModelEntryS(int32_t sens)8214 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
8215     double s = gCamCapability[mCameraId]->gradient_S * sens +
8216             gCamCapability[mCameraId]->offset_S;
8217     return ((s < 0.0) ? 0.0 : s);
8218 }
8219 
8220 /*===========================================================================
8221  * FUNCTION   : computeNoiseModelEntryO
8222  *
8223  * DESCRIPTION: function to map a given sensitivity to the O noise
8224  *              model parameters in the DNG noise model.
8225  *
8226  * PARAMETERS : sens : the sensor sensitivity
8227  *
8228  ** RETURN    : O (sensor readout) noise
8229  *
8230  *==========================================================================*/
computeNoiseModelEntryO(int32_t sens)8231 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
8232     int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
8233     double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
8234             1.0 : (1.0 * sens / max_analog_sens);
8235     double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
8236             gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
8237     return ((o < 0.0) ? 0.0 : o);
8238 }
8239 
8240 /*===========================================================================
8241  * FUNCTION   : getSensorSensitivity
8242  *
8243  * DESCRIPTION: convert iso_mode to an integer value
8244  *
8245  * PARAMETERS : iso_mode : the iso_mode supported by sensor
8246  *
8247  ** RETURN    : sensitivity supported by sensor
8248  *
8249  *==========================================================================*/
getSensorSensitivity(int32_t iso_mode)8250 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
8251 {
8252     int32_t sensitivity;
8253 
8254     switch (iso_mode) {
8255     case CAM_ISO_MODE_100:
8256         sensitivity = 100;
8257         break;
8258     case CAM_ISO_MODE_200:
8259         sensitivity = 200;
8260         break;
8261     case CAM_ISO_MODE_400:
8262         sensitivity = 400;
8263         break;
8264     case CAM_ISO_MODE_800:
8265         sensitivity = 800;
8266         break;
8267     case CAM_ISO_MODE_1600:
8268         sensitivity = 1600;
8269         break;
8270     default:
8271         sensitivity = -1;
8272         break;
8273     }
8274     return sensitivity;
8275 }
8276 
8277 /*===========================================================================
8278  * FUNCTION   : getCamInfo
8279  *
8280  * DESCRIPTION: query camera capabilities
8281  *
8282  * PARAMETERS :
8283  *   @cameraId  : camera Id
8284  *   @info      : camera info struct to be filled in with camera capabilities
8285  *
8286  * RETURN     : int type of status
8287  *              NO_ERROR  -- success
8288  *              none-zero failure code
8289  *==========================================================================*/
getCamInfo(uint32_t cameraId,struct camera_info * info)8290 int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
8291         struct camera_info *info)
8292 {
8293     ATRACE_CALL();
8294     int rc = 0;
8295 
8296     pthread_mutex_lock(&gCamLock);
8297     if (NULL == gCamCapability[cameraId]) {
8298         rc = initCapabilities(cameraId);
8299         if (rc < 0) {
8300             pthread_mutex_unlock(&gCamLock);
8301             return rc;
8302         }
8303     }
8304 
8305     if (NULL == gStaticMetadata[cameraId]) {
8306         rc = initStaticMetadata(cameraId);
8307         if (rc < 0) {
8308             pthread_mutex_unlock(&gCamLock);
8309             return rc;
8310         }
8311     }
8312 
8313     switch(gCamCapability[cameraId]->position) {
8314     case CAM_POSITION_BACK:
8315         info->facing = CAMERA_FACING_BACK;
8316         break;
8317 
8318     case CAM_POSITION_FRONT:
8319         info->facing = CAMERA_FACING_FRONT;
8320         break;
8321 
8322     default:
8323         LOGE("Unknown position type for camera id:%d", cameraId);
8324         rc = -1;
8325         break;
8326     }
8327 
8328 
8329     info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
8330     info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
8331     info->static_camera_characteristics = gStaticMetadata[cameraId];
8332 
8333     //For now assume both cameras can operate independently.
8334     info->conflicting_devices = NULL;
8335     info->conflicting_devices_length = 0;
8336 
8337     //resource cost is 100 * MIN(1.0, m/M),
8338     //where m is throughput requirement with maximum stream configuration
8339     //and M is CPP maximum throughput.
8340     float max_fps = 0.0;
8341     for (uint32_t i = 0;
8342             i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
8343         if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
8344             max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
8345     }
8346     float ratio = 1.0 * MAX_PROCESSED_STREAMS *
8347             gCamCapability[cameraId]->active_array_size.width *
8348             gCamCapability[cameraId]->active_array_size.height * max_fps /
8349             gCamCapability[cameraId]->max_pixel_bandwidth;
8350     info->resource_cost = 100 * MIN(1.0, ratio);
8351     LOGI("camera %d resource cost is %d", cameraId,
8352             info->resource_cost);
8353 
8354     pthread_mutex_unlock(&gCamLock);
8355     return rc;
8356 }
8357 
8358 /*===========================================================================
8359  * FUNCTION   : translateCapabilityToMetadata
8360  *
8361  * DESCRIPTION: translate the capability into camera_metadata_t
8362  *
8363  * PARAMETERS : type of the request
8364  *
8365  *
8366  * RETURN     : success: camera_metadata_t*
8367  *              failure: NULL
8368  *
8369  *==========================================================================*/
translateCapabilityToMetadata(int type)8370 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
8371 {
8372     if (mDefaultMetadata[type] != NULL) {
8373         return mDefaultMetadata[type];
8374     }
8375     //first time we are handling this request
8376     //fill up the metadata structure using the wrapper class
8377     CameraMetadata settings;
8378     //translate from cam_capability_t to camera_metadata_tag_t
8379     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
8380     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
8381     int32_t defaultRequestID = 0;
8382     settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
8383 
8384     /* OIS disable */
8385     char ois_prop[PROPERTY_VALUE_MAX];
8386     memset(ois_prop, 0, sizeof(ois_prop));
8387     property_get("persist.camera.ois.disable", ois_prop, "0");
8388     uint8_t ois_disable = (uint8_t)atoi(ois_prop);
8389 
8390     /* Force video to use OIS */
8391     char videoOisProp[PROPERTY_VALUE_MAX];
8392     memset(videoOisProp, 0, sizeof(videoOisProp));
8393     property_get("persist.camera.ois.video", videoOisProp, "1");
8394     uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
8395 
8396     // EIS enable/disable
8397     char eis_prop[PROPERTY_VALUE_MAX];
8398     memset(eis_prop, 0, sizeof(eis_prop));
8399     property_get("persist.camera.eis.enable", eis_prop, "0");
8400     const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
8401 
8402     // Hybrid AE enable/disable
8403     char hybrid_ae_prop[PROPERTY_VALUE_MAX];
8404     memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
8405     property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
8406     const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
8407 
8408     const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
8409     // This is a bit hacky. EIS is enabled only when the above setprop
8410     // is set to non-zero value and on back camera (for 2015 Nexus).
8411     // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
8412     // configureStream is called before this function. In other words,
8413     // we cannot guarantee the app will call configureStream before
8414     // calling createDefaultRequest.
8415     const bool eisEnabled = facingBack && eis_prop_set;
8416 
8417     uint8_t controlIntent = 0;
8418     uint8_t focusMode;
8419     uint8_t vsMode;
8420     uint8_t optStabMode;
8421     uint8_t cacMode;
8422     uint8_t edge_mode;
8423     uint8_t noise_red_mode;
8424     uint8_t tonemap_mode;
8425     bool highQualityModeEntryAvailable = FALSE;
8426     bool fastModeEntryAvailable = FALSE;
8427     vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
8428     optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8429     uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8430 
8431     switch (type) {
8432       case CAMERA3_TEMPLATE_PREVIEW:
8433         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
8434         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8435         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8436         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8437         edge_mode = ANDROID_EDGE_MODE_FAST;
8438         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8439         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8440         break;
8441       case CAMERA3_TEMPLATE_STILL_CAPTURE:
8442         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
8443         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8444         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8445         edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
8446         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
8447         tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
8448         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8449         // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
8450         for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
8451             if (gCamCapability[mCameraId]->aberration_modes[i] ==
8452                     CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
8453                 highQualityModeEntryAvailable = TRUE;
8454             } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
8455                     CAM_COLOR_CORRECTION_ABERRATION_FAST) {
8456                 fastModeEntryAvailable = TRUE;
8457             }
8458         }
8459         if (highQualityModeEntryAvailable) {
8460             cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
8461         } else if (fastModeEntryAvailable) {
8462             cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8463         }
8464         if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
8465             shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
8466         }
8467         break;
8468       case CAMERA3_TEMPLATE_VIDEO_RECORD:
8469         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
8470         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8471         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8472         if (eisEnabled) {
8473             vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
8474         }
8475         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8476         edge_mode = ANDROID_EDGE_MODE_FAST;
8477         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8478         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8479         if (forceVideoOis)
8480             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8481         break;
8482       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
8483         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
8484         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8485         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8486         if (eisEnabled) {
8487             vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
8488         }
8489         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8490         edge_mode = ANDROID_EDGE_MODE_FAST;
8491         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8492         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8493         if (forceVideoOis)
8494             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8495         break;
8496       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
8497         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
8498         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8499         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8500         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8501         edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
8502         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
8503         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8504         break;
8505       case CAMERA3_TEMPLATE_MANUAL:
8506         edge_mode = ANDROID_EDGE_MODE_FAST;
8507         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8508         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8509         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8510         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
8511         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8512         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8513         break;
8514       default:
8515         edge_mode = ANDROID_EDGE_MODE_FAST;
8516         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8517         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8518         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8519         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
8520         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8521         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8522         break;
8523     }
8524     settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
8525     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
8526     settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
8527     if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
8528         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8529     }
8530     settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
8531 
8532     if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8533             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
8534         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8535     else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8536             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
8537             || ois_disable)
8538         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8539     settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
8540     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
8541 
8542     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
8543             &gCamCapability[mCameraId]->exposure_compensation_default, 1);
8544 
8545     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
8546     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
8547 
8548     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
8549     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
8550 
8551     static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
8552     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
8553 
8554     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
8555     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
8556 
8557     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
8558     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
8559 
8560     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
8561     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
8562 
8563     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
8564     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
8565 
8566     /*flash*/
8567     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
8568     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
8569 
8570     static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
8571     settings.update(ANDROID_FLASH_FIRING_POWER,
8572             &flashFiringLevel, 1);
8573 
8574     /* lens */
8575     float default_aperture = gCamCapability[mCameraId]->apertures[0];
8576     settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
8577 
8578     if (gCamCapability[mCameraId]->filter_densities_count) {
8579         float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
8580         settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
8581                         gCamCapability[mCameraId]->filter_densities_count);
8582     }
8583 
8584     float default_focal_length = gCamCapability[mCameraId]->focal_length;
8585     settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
8586 
8587     if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
8588         float default_focus_distance = 0;
8589         settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
8590     }
8591 
8592     static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
8593     settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
8594 
8595     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8596     settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8597 
8598     static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
8599     settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
8600 
8601     /* face detection (default to OFF) */
8602     static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
8603     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
8604 
8605     static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
8606     settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
8607 
8608     static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
8609     settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
8610 
8611     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8612     settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8613 
8614 
8615     static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8616     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
8617 
8618     /* Exposure time(Update the Min Exposure Time)*/
8619     int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
8620     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
8621 
8622     /* frame duration */
8623     static const int64_t default_frame_duration = NSEC_PER_33MSEC;
8624     settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
8625 
8626     /* sensitivity */
8627     static const int32_t default_sensitivity = 100;
8628     settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
8629     static const int32_t default_isp_sensitivity =
8630             gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
8631     settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
8632 
8633     /*edge mode*/
8634     settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
8635 
8636     /*noise reduction mode*/
8637     settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
8638 
8639     /*color correction mode*/
8640     static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
8641     settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
8642 
8643     /*transform matrix mode*/
8644     settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
8645 
8646     int32_t scaler_crop_region[4];
8647     scaler_crop_region[0] = 0;
8648     scaler_crop_region[1] = 0;
8649     scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
8650     scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
8651     settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
8652 
8653     static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
8654     settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
8655 
8656     /*focus distance*/
8657     float focus_distance = 0.0;
8658     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
8659 
8660     /*target fps range: use maximum range for picture, and maximum fixed range for video*/
8661     /* Restrict default preview template to max 30 fps */
8662     float max_range = 0.0;
8663     float max_fixed_fps = 0.0;
8664     int32_t fps_range[2] = {0, 0};
8665     for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
8666             i++) {
8667         if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
8668                 TEMPLATE_MAX_PREVIEW_FPS) {
8669             continue;
8670         }
8671         float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
8672             gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8673         if (type == CAMERA3_TEMPLATE_PREVIEW ||
8674                 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
8675                 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
8676             if (range > max_range) {
8677                 fps_range[0] =
8678                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8679                 fps_range[1] =
8680                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8681                 max_range = range;
8682             }
8683         } else {
8684             if (range < 0.01 && max_fixed_fps <
8685                     gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
8686                 fps_range[0] =
8687                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8688                 fps_range[1] =
8689                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8690                 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8691             }
8692         }
8693     }
8694     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
8695 
8696     /*precapture trigger*/
8697     uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
8698     settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
8699 
8700     /*af trigger*/
8701     uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
8702     settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
8703 
8704     /* ae & af regions */
8705     int32_t active_region[] = {
8706             gCamCapability[mCameraId]->active_array_size.left,
8707             gCamCapability[mCameraId]->active_array_size.top,
8708             gCamCapability[mCameraId]->active_array_size.left +
8709                     gCamCapability[mCameraId]->active_array_size.width,
8710             gCamCapability[mCameraId]->active_array_size.top +
8711                     gCamCapability[mCameraId]->active_array_size.height,
8712             0};
8713     settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
8714             sizeof(active_region) / sizeof(active_region[0]));
8715     settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
8716             sizeof(active_region) / sizeof(active_region[0]));
8717 
8718     /* black level lock */
8719     uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8720     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
8721 
8722     //special defaults for manual template
8723     if (type == CAMERA3_TEMPLATE_MANUAL) {
8724         static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
8725         settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
8726 
8727         static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
8728         settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
8729 
8730         static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
8731         settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
8732 
8733         static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
8734         settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
8735 
8736         static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
8737         settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
8738 
8739         static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
8740         settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
8741     }
8742 
8743 
8744     /* TNR
8745      * We'll use this location to determine which modes TNR will be set.
8746      * We will enable TNR to be on if either of the Preview/Video stream requires TNR
8747      * This is not to be confused with linking on a per stream basis that decision
8748      * is still on per-session basis and will be handled as part of config stream
8749      */
8750     uint8_t tnr_enable = 0;
8751 
8752     if (m_bTnrPreview || m_bTnrVideo) {
8753 
8754         switch (type) {
8755             case CAMERA3_TEMPLATE_VIDEO_RECORD:
8756                     tnr_enable = 1;
8757                     break;
8758 
8759             default:
8760                     tnr_enable = 0;
8761                     break;
8762         }
8763 
8764         int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
8765         settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
8766         settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
8767 
8768         LOGD("TNR:%d with process plate %d for template:%d",
8769                              tnr_enable, tnr_process_type, type);
8770     }
8771 
8772     //Update Link tags to default
8773     int32_t sync_type = CAM_TYPE_STANDALONE;
8774     settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
8775 
8776     int32_t is_main = 0; //this doesn't matter as app should overwrite
8777     settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
8778 
8779     settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
8780 
8781     /* CDS default */
8782     char prop[PROPERTY_VALUE_MAX];
8783     memset(prop, 0, sizeof(prop));
8784     property_get("persist.camera.CDS", prop, "Auto");
8785     cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
8786     cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
8787     if (CAM_CDS_MODE_MAX == cds_mode) {
8788         cds_mode = CAM_CDS_MODE_AUTO;
8789     }
8790 
8791     /* Disabling CDS in templates which have TNR enabled*/
8792     if (tnr_enable)
8793         cds_mode = CAM_CDS_MODE_OFF;
8794 
8795     int32_t mode = cds_mode;
8796     settings.update(QCAMERA3_CDS_MODE, &mode, 1);
8797 
8798     /* hybrid ae */
8799     settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
8800 
8801     mDefaultMetadata[type] = settings.release();
8802 
8803     return mDefaultMetadata[type];
8804 }
8805 
8806 /*===========================================================================
8807  * FUNCTION   : setFrameParameters
8808  *
8809  * DESCRIPTION: set parameters per frame as requested in the metadata from
8810  *              framework
8811  *
8812  * PARAMETERS :
8813  *   @request   : request that needs to be serviced
8814  *   @streamsArray : Stream ID of all the requested streams
8815  *   @blob_request: Whether this request is a blob request or not
8816  *
8817  * RETURN     : success: NO_ERROR
8818  *              failure:
8819  *==========================================================================*/
int QCamera3HardwareInterface::setFrameParameters(
                    camera3_capture_request_t *request,
                    cam_stream_ID_t streamsArray,
                    int blob_request,
                    uint32_t snapshotStreamId)
{
    /* Translate from camera_metadata_t type to parm_type_t: populate
     * mParameters with the per-frame settings for this capture request.
     * NOTE: the batch is cleared first, so entries must be re-added on
     * every request. */
    int rc = 0;
    int32_t hal_version = CAM_HAL_V3;

    clear_metadata_buffer(mParameters);
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
        LOGE("Failed to set hal version in the parameters");
        return BAD_VALUE;
    }

    /* We need to update the frame number in the parameters so the backend
     * can correlate results with this request. */
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
            request->frame_number)) {
        LOGE("Failed to set the frame number in the parameters");
        return BAD_VALUE;
    }

    /* Update stream id of all the requested buffers */
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
        LOGE("Failed to set stream type mask in the parameters");
        return BAD_VALUE;
    }

    if (mUpdateDebugLevel) {
        uint32_t dummyDebugLevel = 0;
        /* The value of dummyDebugLevel is irrelevant. On receiving
         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL the backend re-reads the debug
         * property itself; the entry merely signals the refresh. */
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
                dummyDebugLevel)) {
            LOGE("Failed to set UPDATE_DEBUG_LEVEL");
            return BAD_VALUE;
        }
        // One-shot flag: cleared so the refresh is sent only once.
        mUpdateDebugLevel = false;
    }

    /* request->settings may legitimately be NULL when the framework repeats
     * the previous settings; in that case nothing further is translated. */
    if(request->settings != NULL){
        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
        // Snapshot the translated params for blob (JPEG) requests so a later
        // reprocess can start from the same settings.
        if (blob_request)
            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
    }

    return rc;
}
8869 
8870 /*===========================================================================
8871  * FUNCTION   : setReprocParameters
8872  *
8873  * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
8874  *              return it.
8875  *
8876  * PARAMETERS :
8877  *   @request   : request that needs to be serviced
8878  *
8879  * RETURN     : success: NO_ERROR
8880  *              failure:
8881  *==========================================================================*/
setReprocParameters(camera3_capture_request_t * request,metadata_buffer_t * reprocParam,uint32_t snapshotStreamId)8882 int32_t QCamera3HardwareInterface::setReprocParameters(
8883         camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
8884         uint32_t snapshotStreamId)
8885 {
8886     /*translate from camera_metadata_t type to parm_type_t*/
8887     int rc = 0;
8888 
8889     if (NULL == request->settings){
8890         LOGE("Reprocess settings cannot be NULL");
8891         return BAD_VALUE;
8892     }
8893 
8894     if (NULL == reprocParam) {
8895         LOGE("Invalid reprocessing metadata buffer");
8896         return BAD_VALUE;
8897     }
8898     clear_metadata_buffer(reprocParam);
8899 
8900     /*we need to update the frame number in the parameters*/
8901     if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
8902             request->frame_number)) {
8903         LOGE("Failed to set the frame number in the parameters");
8904         return BAD_VALUE;
8905     }
8906 
8907     rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
8908     if (rc < 0) {
8909         LOGE("Failed to translate reproc request");
8910         return rc;
8911     }
8912 
8913     CameraMetadata frame_settings;
8914     frame_settings = request->settings;
8915     if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
8916             frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
8917         int32_t *crop_count =
8918                 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
8919         int32_t *crop_data =
8920                 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
8921         int32_t *roi_map =
8922                 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
8923         if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
8924             cam_crop_data_t crop_meta;
8925             memset(&crop_meta, 0, sizeof(cam_crop_data_t));
8926             crop_meta.num_of_streams = 1;
8927             crop_meta.crop_info[0].crop.left   = crop_data[0];
8928             crop_meta.crop_info[0].crop.top    = crop_data[1];
8929             crop_meta.crop_info[0].crop.width  = crop_data[2];
8930             crop_meta.crop_info[0].crop.height = crop_data[3];
8931 
8932             crop_meta.crop_info[0].roi_map.left =
8933                     roi_map[0];
8934             crop_meta.crop_info[0].roi_map.top =
8935                     roi_map[1];
8936             crop_meta.crop_info[0].roi_map.width =
8937                     roi_map[2];
8938             crop_meta.crop_info[0].roi_map.height =
8939                     roi_map[3];
8940 
8941             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
8942                 rc = BAD_VALUE;
8943             }
8944             LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
8945                     request->input_buffer->stream,
8946                     crop_meta.crop_info[0].crop.left,
8947                     crop_meta.crop_info[0].crop.top,
8948                     crop_meta.crop_info[0].crop.width,
8949                     crop_meta.crop_info[0].crop.height);
8950             LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
8951                     request->input_buffer->stream,
8952                     crop_meta.crop_info[0].roi_map.left,
8953                     crop_meta.crop_info[0].roi_map.top,
8954                     crop_meta.crop_info[0].roi_map.width,
8955                     crop_meta.crop_info[0].roi_map.height);
8956             } else {
8957                 LOGE("Invalid reprocess crop count %d!", *crop_count);
8958             }
8959     } else {
8960         LOGE("No crop data from matching output stream");
8961     }
8962 
8963     /* These settings are not needed for regular requests so handle them specially for
8964        reprocess requests; information needed for EXIF tags */
8965     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8966         int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8967                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8968         if (NAME_NOT_FOUND != val) {
8969             uint32_t flashMode = (uint32_t)val;
8970             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
8971                 rc = BAD_VALUE;
8972             }
8973         } else {
8974             LOGE("Could not map fwk flash mode %d to correct hal flash mode",
8975                     frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8976         }
8977     } else {
8978         LOGH("No flash mode in reprocess settings");
8979     }
8980 
8981     if (frame_settings.exists(ANDROID_FLASH_STATE)) {
8982         int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
8983         if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
8984             rc = BAD_VALUE;
8985         }
8986     } else {
8987         LOGH("No flash state in reprocess settings");
8988     }
8989 
8990     return rc;
8991 }
8992 
8993 /*===========================================================================
8994  * FUNCTION   : saveRequestSettings
8995  *
8996  * DESCRIPTION: Add any settings that might have changed to the request settings
8997  *              and save the settings to be applied on the frame
8998  *
8999  * PARAMETERS :
9000  *   @jpegMetadata : the extracted and/or modified jpeg metadata
9001  *   @request      : request with initial settings
9002  *
9003  * RETURN     :
9004  * camera_metadata_t* : pointer to the saved request settings
9005  *==========================================================================*/
saveRequestSettings(const CameraMetadata & jpegMetadata,camera3_capture_request_t * request)9006 camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
9007         const CameraMetadata &jpegMetadata,
9008         camera3_capture_request_t *request)
9009 {
9010     camera_metadata_t *resultMetadata;
9011     CameraMetadata camMetadata;
9012     camMetadata = request->settings;
9013 
9014     if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9015         int32_t thumbnail_size[2];
9016         thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9017         thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9018         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
9019                 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
9020     }
9021 
9022     resultMetadata = camMetadata.release();
9023     return resultMetadata;
9024 }
9025 
9026 /*===========================================================================
9027  * FUNCTION   : setHalFpsRange
9028  *
9029  * DESCRIPTION: set FPS range parameter
9030  *
9031  *
9032  * PARAMETERS :
9033  *   @settings    : Metadata from framework
9034  *   @hal_metadata: Metadata buffer
9035  *
9036  *
9037  * RETURN     : success: NO_ERROR
9038  *              failure:
9039  *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): ANDROID_CONTROL_AE_TARGET_FPS_RANGE is read without an
    // exists() check here — presumably the caller guarantees presence before
    // invoking this; confirm against translateToHalMetadata.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // By default the video fps range mirrors the AE target range; it is
    // overridden below for constrained high-speed (HFR) sessions.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // HFR session: pin both the preview min and video min to the video
        // max so the sensor runs at a fixed high rate (see table above).
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch size is the ratio of sensor fps to the desired
                // preview fps, clamped to the hardware maximum.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
9133 
9134 /*===========================================================================
9135  * FUNCTION   : translateToHalMetadata
9136  *
9137  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
9138  *
9139  *
9140  * PARAMETERS :
9141  *   @request  : request sent from framework
9142  *
9143  *
9144  * RETURN     : success: NO_ERROR
9145  *              failure:
9146  *==========================================================================*/
translateToHalMetadata(const camera3_capture_request_t * request,metadata_buffer_t * hal_metadata,uint32_t snapshotStreamId)9147 int QCamera3HardwareInterface::translateToHalMetadata
9148                                   (const camera3_capture_request_t *request,
9149                                    metadata_buffer_t *hal_metadata,
9150                                    uint32_t snapshotStreamId)
9151 {
9152     int rc = 0;
9153     CameraMetadata frame_settings;
9154     frame_settings = request->settings;
9155 
9156     /* Do not change the order of the following list unless you know what you are
9157      * doing.
9158      * The order is laid out in such a way that parameters in the front of the table
9159      * may be used to override the parameters later in the table. Examples are:
9160      * 1. META_MODE should precede AEC/AWB/AF MODE
9161      * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
9162      * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
9163      * 4. Any mode should precede it's corresponding settings
9164      */
9165     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
9166         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
9167         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
9168             rc = BAD_VALUE;
9169         }
9170         rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
9171         if (rc != NO_ERROR) {
9172             LOGE("extractSceneMode failed");
9173         }
9174     }
9175 
9176     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9177         uint8_t fwk_aeMode =
9178             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9179         uint8_t aeMode;
9180         int32_t redeye;
9181 
9182         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
9183             aeMode = CAM_AE_MODE_OFF;
9184         } else {
9185             aeMode = CAM_AE_MODE_ON;
9186         }
9187         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
9188             redeye = 1;
9189         } else {
9190             redeye = 0;
9191         }
9192 
9193         int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
9194                 fwk_aeMode);
9195         if (NAME_NOT_FOUND != val) {
9196             int32_t flashMode = (int32_t)val;
9197             ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
9198         }
9199 
9200         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
9201         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
9202             rc = BAD_VALUE;
9203         }
9204     }
9205 
9206     if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
9207         uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
9208         int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9209                 fwk_whiteLevel);
9210         if (NAME_NOT_FOUND != val) {
9211             uint8_t whiteLevel = (uint8_t)val;
9212             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
9213                 rc = BAD_VALUE;
9214             }
9215         }
9216     }
9217 
9218     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
9219         uint8_t fwk_cacMode =
9220                 frame_settings.find(
9221                         ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
9222         int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
9223                 fwk_cacMode);
9224         if (NAME_NOT_FOUND != val) {
9225             cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
9226             bool entryAvailable = FALSE;
9227             // Check whether Frameworks set CAC mode is supported in device or not
9228             for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9229                 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
9230                     entryAvailable = TRUE;
9231                     break;
9232                 }
9233             }
9234             LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
9235             // If entry not found then set the device supported mode instead of frameworks mode i.e,
9236             // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
9237             // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
9238             if (entryAvailable == FALSE) {
9239                 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9240                     cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9241                 } else {
9242                     if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9243                         // High is not supported and so set the FAST as spec say's underlying
9244                         // device implementation can be the same for both modes.
9245                         cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
9246                     } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9247                         // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
9248                         // in order to avoid the fps drop due to high quality
9249                         cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9250                     } else {
9251                         cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9252                     }
9253                 }
9254             }
9255             LOGD("Final cacMode is %d", cacMode);
9256             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
9257                 rc = BAD_VALUE;
9258             }
9259         } else {
9260             LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
9261         }
9262     }
9263 
9264     if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
9265         uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
9266         int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9267                 fwk_focusMode);
9268         if (NAME_NOT_FOUND != val) {
9269             uint8_t focusMode = (uint8_t)val;
9270             LOGD("set focus mode %d", focusMode);
9271             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
9272                 rc = BAD_VALUE;
9273             }
9274         }
9275     }
9276 
9277     if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
9278         float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
9279         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
9280                 focalDistance)) {
9281             rc = BAD_VALUE;
9282         }
9283     }
9284 
9285     if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
9286         uint8_t fwk_antibandingMode =
9287                 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
9288         int val = lookupHalName(ANTIBANDING_MODES_MAP,
9289                 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
9290         if (NAME_NOT_FOUND != val) {
9291             uint32_t hal_antibandingMode = (uint32_t)val;
9292             if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
9293                 if (m60HzZone) {
9294                     hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
9295                 } else {
9296                     hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
9297                 }
9298             }
9299             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
9300                     hal_antibandingMode)) {
9301                 rc = BAD_VALUE;
9302             }
9303         }
9304     }
9305 
9306     if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
9307         int32_t expCompensation = frame_settings.find(
9308                 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
9309         if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
9310             expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
9311         if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
9312             expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
9313         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
9314                 expCompensation)) {
9315             rc = BAD_VALUE;
9316         }
9317     }
9318 
9319     if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
9320         uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
9321         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
9322             rc = BAD_VALUE;
9323         }
9324     }
9325     if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
9326         rc = setHalFpsRange(frame_settings, hal_metadata);
9327         if (rc != NO_ERROR) {
9328             LOGE("setHalFpsRange failed");
9329         }
9330     }
9331 
9332     if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
9333         uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
9334         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
9335             rc = BAD_VALUE;
9336         }
9337     }
9338 
9339     if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
9340         uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
9341         int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9342                 fwk_effectMode);
9343         if (NAME_NOT_FOUND != val) {
9344             uint8_t effectMode = (uint8_t)val;
9345             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
9346                 rc = BAD_VALUE;
9347             }
9348         }
9349     }
9350 
9351     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
9352         uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
9353         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
9354                 colorCorrectMode)) {
9355             rc = BAD_VALUE;
9356         }
9357     }
9358 
9359     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
9360         cam_color_correct_gains_t colorCorrectGains;
9361         for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
9362             colorCorrectGains.gains[i] =
9363                     frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
9364         }
9365         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
9366                 colorCorrectGains)) {
9367             rc = BAD_VALUE;
9368         }
9369     }
9370 
9371     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
9372         cam_color_correct_matrix_t colorCorrectTransform;
9373         cam_rational_type_t transform_elem;
9374         size_t num = 0;
9375         for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
9376            for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
9377               transform_elem.numerator =
9378                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
9379               transform_elem.denominator =
9380                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
9381               colorCorrectTransform.transform_matrix[i][j] = transform_elem;
9382               num++;
9383            }
9384         }
9385         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
9386                 colorCorrectTransform)) {
9387             rc = BAD_VALUE;
9388         }
9389     }
9390 
9391     cam_trigger_t aecTrigger;
9392     aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
9393     aecTrigger.trigger_id = -1;
9394     if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
9395         frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
9396         aecTrigger.trigger =
9397             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
9398         aecTrigger.trigger_id =
9399             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
9400         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
9401                 aecTrigger)) {
9402             rc = BAD_VALUE;
9403         }
9404         LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
9405                 aecTrigger.trigger, aecTrigger.trigger_id);
9406     }
9407 
9408     /*af_trigger must come with a trigger id*/
9409     if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
9410         frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
9411         cam_trigger_t af_trigger;
9412         af_trigger.trigger =
9413             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
9414         af_trigger.trigger_id =
9415             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
9416         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
9417             rc = BAD_VALUE;
9418         }
9419         LOGD("AfTrigger: %d AfTriggerID: %d",
9420                 af_trigger.trigger, af_trigger.trigger_id);
9421     }
9422 
9423     if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
9424         int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
9425         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
9426             rc = BAD_VALUE;
9427         }
9428     }
9429     if (frame_settings.exists(ANDROID_EDGE_MODE)) {
9430         cam_edge_application_t edge_application;
9431         edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
9432         if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
9433             edge_application.sharpness = 0;
9434         } else {
9435             edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
9436         }
9437         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
9438             rc = BAD_VALUE;
9439         }
9440     }
9441 
9442     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
9443         int32_t respectFlashMode = 1;
9444         if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9445             uint8_t fwk_aeMode =
9446                 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9447             if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
9448                 respectFlashMode = 0;
9449                 LOGH("AE Mode controls flash, ignore android.flash.mode");
9450             }
9451         }
9452         if (respectFlashMode) {
9453             int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
9454                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9455             LOGH("flash mode after mapping %d", val);
9456             // To check: CAM_INTF_META_FLASH_MODE usage
9457             if (NAME_NOT_FOUND != val) {
9458                 uint8_t flashMode = (uint8_t)val;
9459                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
9460                     rc = BAD_VALUE;
9461                 }
9462             }
9463         }
9464     }
9465 
9466     if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
9467         uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
9468         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
9469             rc = BAD_VALUE;
9470         }
9471     }
9472 
9473     if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
9474         int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
9475         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
9476                 flashFiringTime)) {
9477             rc = BAD_VALUE;
9478         }
9479     }
9480 
9481     if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
9482         uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
9483         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
9484                 hotPixelMode)) {
9485             rc = BAD_VALUE;
9486         }
9487     }
9488 
9489     if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
9490         float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
9491         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
9492                 lensAperture)) {
9493             rc = BAD_VALUE;
9494         }
9495     }
9496 
9497     if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
9498         float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
9499         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
9500                 filterDensity)) {
9501             rc = BAD_VALUE;
9502         }
9503     }
9504 
9505     if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
9506         float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
9507         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
9508                 focalLength)) {
9509             rc = BAD_VALUE;
9510         }
9511     }
9512 
9513     if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
9514         uint8_t optStabMode =
9515                 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
9516         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
9517                 optStabMode)) {
9518             rc = BAD_VALUE;
9519         }
9520     }
9521 
9522     if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
9523         uint8_t videoStabMode =
9524                 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
9525         LOGD("videoStabMode from APP = %d", videoStabMode);
9526         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
9527                 videoStabMode)) {
9528             rc = BAD_VALUE;
9529         }
9530     }
9531 
9532 
9533     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
9534         uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
9535         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
9536                 noiseRedMode)) {
9537             rc = BAD_VALUE;
9538         }
9539     }
9540 
9541     if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
9542         float reprocessEffectiveExposureFactor =
9543             frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
9544         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
9545                 reprocessEffectiveExposureFactor)) {
9546             rc = BAD_VALUE;
9547         }
9548     }
9549 
9550     cam_crop_region_t scalerCropRegion;
9551     bool scalerCropSet = false;
9552     if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
9553         scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
9554         scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
9555         scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
9556         scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
9557 
9558         // Map coordinate system from active array to sensor output.
9559         mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
9560                 scalerCropRegion.width, scalerCropRegion.height);
9561 
9562         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
9563                 scalerCropRegion)) {
9564             rc = BAD_VALUE;
9565         }
9566         scalerCropSet = true;
9567     }
9568 
9569     if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
9570         int64_t sensorExpTime =
9571                 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
9572         LOGD("setting sensorExpTime %lld", sensorExpTime);
9573         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
9574                 sensorExpTime)) {
9575             rc = BAD_VALUE;
9576         }
9577     }
9578 
9579     if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
9580         int64_t sensorFrameDuration =
9581                 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
9582         int64_t minFrameDuration = getMinFrameDuration(request);
9583         sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
9584         if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
9585             sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
9586         LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
9587         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
9588                 sensorFrameDuration)) {
9589             rc = BAD_VALUE;
9590         }
9591     }
9592 
9593     if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
9594         int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
9595         if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
9596                 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
9597         if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
9598                 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
9599         LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
9600         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
9601                 sensorSensitivity)) {
9602             rc = BAD_VALUE;
9603         }
9604     }
9605 
9606     if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
9607         int32_t ispSensitivity =
9608             frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
9609         if (ispSensitivity <
9610             gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
9611                 ispSensitivity =
9612                     gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9613                 LOGD("clamp ispSensitivity to %d", ispSensitivity);
9614         }
9615         if (ispSensitivity >
9616             gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
9617                 ispSensitivity =
9618                     gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
9619                 LOGD("clamp ispSensitivity to %d", ispSensitivity);
9620         }
9621         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
9622                 ispSensitivity)) {
9623             rc = BAD_VALUE;
9624         }
9625     }
9626 
9627     if (frame_settings.exists(ANDROID_SHADING_MODE)) {
9628         uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
9629         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
9630             rc = BAD_VALUE;
9631         }
9632     }
9633 
9634     if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
9635         uint8_t fwk_facedetectMode =
9636                 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
9637 
9638         int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
9639                 fwk_facedetectMode);
9640 
9641         if (NAME_NOT_FOUND != val) {
9642             uint8_t facedetectMode = (uint8_t)val;
9643             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
9644                     facedetectMode)) {
9645                 rc = BAD_VALUE;
9646             }
9647         }
9648     }
9649 
9650     if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
9651         uint8_t histogramMode =
9652                 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
9653         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
9654                 histogramMode)) {
9655             rc = BAD_VALUE;
9656         }
9657     }
9658 
9659     if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
9660         uint8_t sharpnessMapMode =
9661                 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
9662         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
9663                 sharpnessMapMode)) {
9664             rc = BAD_VALUE;
9665         }
9666     }
9667 
9668     if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
9669         uint8_t tonemapMode =
9670                 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
9671         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
9672             rc = BAD_VALUE;
9673         }
9674     }
9675     /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
9676     /*All tonemap channels will have the same number of points*/
9677     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
9678         frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
9679         frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
9680         cam_rgb_tonemap_curves tonemapCurves;
9681         tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
9682         if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
9683             LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
9684                      tonemapCurves.tonemap_points_cnt,
9685                     CAM_MAX_TONEMAP_CURVE_SIZE);
9686             tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
9687         }
9688 
9689         /* ch0 = G*/
9690         size_t point = 0;
9691         cam_tonemap_curve_t tonemapCurveGreen;
9692         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9693             for (size_t j = 0; j < 2; j++) {
9694                tonemapCurveGreen.tonemap_points[i][j] =
9695                   frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
9696                point++;
9697             }
9698         }
9699         tonemapCurves.curves[0] = tonemapCurveGreen;
9700 
9701         /* ch 1 = B */
9702         point = 0;
9703         cam_tonemap_curve_t tonemapCurveBlue;
9704         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9705             for (size_t j = 0; j < 2; j++) {
9706                tonemapCurveBlue.tonemap_points[i][j] =
9707                   frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
9708                point++;
9709             }
9710         }
9711         tonemapCurves.curves[1] = tonemapCurveBlue;
9712 
9713         /* ch 2 = R */
9714         point = 0;
9715         cam_tonemap_curve_t tonemapCurveRed;
9716         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9717             for (size_t j = 0; j < 2; j++) {
9718                tonemapCurveRed.tonemap_points[i][j] =
9719                   frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
9720                point++;
9721             }
9722         }
9723         tonemapCurves.curves[2] = tonemapCurveRed;
9724 
9725         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
9726                 tonemapCurves)) {
9727             rc = BAD_VALUE;
9728         }
9729     }
9730 
9731     if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
9732         uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
9733         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
9734                 captureIntent)) {
9735             rc = BAD_VALUE;
9736         }
9737     }
9738 
9739     if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
9740         uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
9741         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
9742                 blackLevelLock)) {
9743             rc = BAD_VALUE;
9744         }
9745     }
9746 
9747     if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
9748         uint8_t lensShadingMapMode =
9749                 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
9750         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
9751                 lensShadingMapMode)) {
9752             rc = BAD_VALUE;
9753         }
9754     }
9755 
9756     if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
9757         cam_area_t roi;
9758         bool reset = true;
9759         convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
9760 
9761         // Map coordinate system from active array to sensor output.
9762         mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9763                 roi.rect.height);
9764 
9765         if (scalerCropSet) {
9766             reset = resetIfNeededROI(&roi, &scalerCropRegion);
9767         }
9768         if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
9769             rc = BAD_VALUE;
9770         }
9771     }
9772 
9773     if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
9774         cam_area_t roi;
9775         bool reset = true;
9776         convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
9777 
9778         // Map coordinate system from active array to sensor output.
9779         mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9780                 roi.rect.height);
9781 
9782         if (scalerCropSet) {
9783             reset = resetIfNeededROI(&roi, &scalerCropRegion);
9784         }
9785         if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
9786             rc = BAD_VALUE;
9787         }
9788     }
9789 
9790     // CDS for non-HFR non-video mode
9791     if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
9792             !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
9793         int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
9794         if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
9795             LOGE("Invalid CDS mode %d!", *fwk_cds);
9796         } else {
9797             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9798                     CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
9799                 rc = BAD_VALUE;
9800             }
9801         }
9802     }
9803 
9804     // TNR
9805     if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
9806         frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
9807         uint8_t b_TnrRequested = 0;
9808         cam_denoise_param_t tnr;
9809         tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
9810         tnr.process_plates =
9811             (cam_denoise_process_type_t)frame_settings.find(
9812             QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
9813         b_TnrRequested = tnr.denoise_enable;
9814         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
9815             rc = BAD_VALUE;
9816         }
9817     }
9818 
9819     if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
9820         int32_t fwk_testPatternMode =
9821                 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
9822         int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
9823                 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
9824 
9825         if (NAME_NOT_FOUND != testPatternMode) {
9826             cam_test_pattern_data_t testPatternData;
9827             memset(&testPatternData, 0, sizeof(testPatternData));
9828             testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
9829             if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
9830                     frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
9831                 int32_t *fwk_testPatternData =
9832                         frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
9833                 testPatternData.r = fwk_testPatternData[0];
9834                 testPatternData.b = fwk_testPatternData[3];
9835                 switch (gCamCapability[mCameraId]->color_arrangement) {
9836                     case CAM_FILTER_ARRANGEMENT_RGGB:
9837                     case CAM_FILTER_ARRANGEMENT_GRBG:
9838                         testPatternData.gr = fwk_testPatternData[1];
9839                         testPatternData.gb = fwk_testPatternData[2];
9840                         break;
9841                     case CAM_FILTER_ARRANGEMENT_GBRG:
9842                     case CAM_FILTER_ARRANGEMENT_BGGR:
9843                         testPatternData.gr = fwk_testPatternData[2];
9844                         testPatternData.gb = fwk_testPatternData[1];
9845                         break;
9846                     default:
9847                         LOGE("color arrangement %d is not supported",
9848                                 gCamCapability[mCameraId]->color_arrangement);
9849                         break;
9850                 }
9851             }
9852             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
9853                     testPatternData)) {
9854                 rc = BAD_VALUE;
9855             }
9856         } else {
9857             LOGE("Invalid framework sensor test pattern mode %d",
9858                     fwk_testPatternMode);
9859         }
9860     }
9861 
9862     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
9863         size_t count = 0;
9864         camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
9865         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
9866                 gps_coords.data.d, gps_coords.count, count);
9867         if (gps_coords.count != count) {
9868             rc = BAD_VALUE;
9869         }
9870     }
9871 
9872     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
9873         char gps_methods[GPS_PROCESSING_METHOD_SIZE];
9874         size_t count = 0;
9875         const char *gps_methods_src = (const char *)
9876                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
9877         memset(gps_methods, '\0', sizeof(gps_methods));
9878         strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
9879         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
9880                 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
9881         if (GPS_PROCESSING_METHOD_SIZE != count) {
9882             rc = BAD_VALUE;
9883         }
9884     }
9885 
9886     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
9887         int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
9888         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
9889                 gps_timestamp)) {
9890             rc = BAD_VALUE;
9891         }
9892     }
9893 
9894     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
9895         int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
9896         cam_rotation_info_t rotation_info;
9897         if (orientation == 0) {
9898            rotation_info.rotation = ROTATE_0;
9899         } else if (orientation == 90) {
9900            rotation_info.rotation = ROTATE_90;
9901         } else if (orientation == 180) {
9902            rotation_info.rotation = ROTATE_180;
9903         } else if (orientation == 270) {
9904            rotation_info.rotation = ROTATE_270;
9905         }
9906         rotation_info.device_rotation = ROTATE_0;
9907         rotation_info.streamId = snapshotStreamId;
9908         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
9909         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
9910             rc = BAD_VALUE;
9911         }
9912     }
9913 
9914     if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
9915         uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
9916         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
9917             rc = BAD_VALUE;
9918         }
9919     }
9920 
9921     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
9922         uint32_t thumb_quality = (uint32_t)
9923                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
9924         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
9925                 thumb_quality)) {
9926             rc = BAD_VALUE;
9927         }
9928     }
9929 
9930     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9931         cam_dimension_t dim;
9932         dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9933         dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9934         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
9935             rc = BAD_VALUE;
9936         }
9937     }
9938 
9939     // Internal metadata
9940     if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
9941         size_t count = 0;
9942         camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
9943         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
9944                 privatedata.data.i32, privatedata.count, count);
9945         if (privatedata.count != count) {
9946             rc = BAD_VALUE;
9947         }
9948     }
9949 
9950     // EV step
9951     if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
9952             gCamCapability[mCameraId]->exp_compensation_step)) {
9953         rc = BAD_VALUE;
9954     }
9955 
9956     // CDS info
9957     if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
9958         cam_cds_data_t *cdsData = (cam_cds_data_t *)
9959                 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
9960 
9961         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9962                 CAM_INTF_META_CDS_DATA, *cdsData)) {
9963             rc = BAD_VALUE;
9964         }
9965     }
9966 
9967     // Hybrid AE
9968     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
9969         uint8_t *hybrid_ae = (uint8_t *)
9970                 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
9971 
9972         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9973                 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
9974             rc = BAD_VALUE;
9975         }
9976     }
9977 
9978     return rc;
9979 }
9980 
9981 /*===========================================================================
9982  * FUNCTION   : captureResultCb
9983  *
9984  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
9985  *
9986  * PARAMETERS :
 *   @metadata     : metadata super buffer from mm-camera-interface
 *   @buffer       : actual gralloc buffer to be returned to frameworks.
 *                   NULL if metadata.
 *   @frame_number : frame number of the request this callback reports on
 *   @isInputBuffer: true if the buffer being reported is an input buffer
 *   @userdata     : opaque pointer to the QCamera3HardwareInterface instance
9990  *
9991  * RETURN     : NONE
9992  *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer,void * userdata)9993 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
9994                 camera3_stream_buffer_t *buffer,
9995                 uint32_t frame_number, bool isInputBuffer, void *userdata)
9996 {
9997     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
9998     if (hw == NULL) {
9999         LOGE("Invalid hw %p", hw);
10000         return;
10001     }
10002 
10003     hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
10004     return;
10005 }
10006 
10007 /*===========================================================================
10008  * FUNCTION   : setBufferErrorStatus
10009  *
10010  * DESCRIPTION: Callback handler for channels to report any buffer errors
10011  *
10012  * PARAMETERS :
10013  *   @ch     : Channel on which buffer error is reported from
10014  *   @frame_number  : frame number on which buffer error is reported on
10015  *   @buffer_status : buffer error status
10016  *   @userdata: userdata
10017  *
10018  * RETURN     : NONE
10019  *==========================================================================*/
setBufferErrorStatus(QCamera3Channel * ch,uint32_t frame_number,camera3_buffer_status_t err,void * userdata)10020 void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
10021                 uint32_t frame_number, camera3_buffer_status_t err,
10022                 void *userdata)
10023 {
10024     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
10025     if (hw == NULL) {
10026         LOGE("Invalid hw %p", hw);
10027         return;
10028     }
10029 
10030     hw->setBufferErrorStatus(ch, frame_number, err);
10031     return;
10032 }
10033 
setBufferErrorStatus(QCamera3Channel * ch,uint32_t frameNumber,camera3_buffer_status_t err)10034 void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
10035                 uint32_t frameNumber, camera3_buffer_status_t err)
10036 {
10037     LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
10038     pthread_mutex_lock(&mMutex);
10039 
10040     for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
10041         if (req.frame_number != frameNumber)
10042             continue;
10043         for (auto& k : req.mPendingBufferList) {
10044             if(k.stream->priv == ch) {
10045                 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
10046             }
10047         }
10048     }
10049 
10050     pthread_mutex_unlock(&mMutex);
10051     return;
10052 }
10053 /*===========================================================================
10054  * FUNCTION   : initialize
10055  *
10056  * DESCRIPTION: Pass framework callback pointers to HAL
10057  *
 * PARAMETERS :
 *   @device       : camera3 device whose priv field holds the HAL instance
 *   @callback_ops : framework callback function table passed to the HAL
10061  * RETURN     : Success : 0
10062  *              Failure: -ENODEV
10063  *==========================================================================*/
10064 
initialize(const struct camera3_device * device,const camera3_callback_ops_t * callback_ops)10065 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
10066                                   const camera3_callback_ops_t *callback_ops)
10067 {
10068     LOGD("E");
10069     QCamera3HardwareInterface *hw =
10070         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10071     if (!hw) {
10072         LOGE("NULL camera device");
10073         return -ENODEV;
10074     }
10075 
10076     int rc = hw->initialize(callback_ops);
10077     LOGD("X");
10078     return rc;
10079 }
10080 
10081 /*===========================================================================
10082  * FUNCTION   : configure_streams
10083  *
10084  * DESCRIPTION:
10085  *
10086  * PARAMETERS :
10087  *
10088  *
10089  * RETURN     : Success: 0
10090  *              Failure: -EINVAL (if stream configuration is invalid)
10091  *                       -ENODEV (fatal error)
10092  *==========================================================================*/
10093 
configure_streams(const struct camera3_device * device,camera3_stream_configuration_t * stream_list)10094 int QCamera3HardwareInterface::configure_streams(
10095         const struct camera3_device *device,
10096         camera3_stream_configuration_t *stream_list)
10097 {
10098     LOGD("E");
10099     QCamera3HardwareInterface *hw =
10100         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10101     if (!hw) {
10102         LOGE("NULL camera device");
10103         return -ENODEV;
10104     }
10105     int rc = hw->configureStreams(stream_list);
10106     LOGD("X");
10107     return rc;
10108 }
10109 
10110 /*===========================================================================
10111  * FUNCTION   : construct_default_request_settings
10112  *
10113  * DESCRIPTION: Configure a settings buffer to meet the required use case
10114  *
10115  * PARAMETERS :
10116  *
10117  *
10118  * RETURN     : Success: Return valid metadata
10119  *              Failure: Return NULL
10120  *==========================================================================*/
10121 const camera_metadata_t* QCamera3HardwareInterface::
construct_default_request_settings(const struct camera3_device * device,int type)10122     construct_default_request_settings(const struct camera3_device *device,
10123                                         int type)
10124 {
10125 
10126     LOGD("E");
10127     camera_metadata_t* fwk_metadata = NULL;
10128     QCamera3HardwareInterface *hw =
10129         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10130     if (!hw) {
10131         LOGE("NULL camera device");
10132         return NULL;
10133     }
10134 
10135     fwk_metadata = hw->translateCapabilityToMetadata(type);
10136 
10137     LOGD("X");
10138     return fwk_metadata;
10139 }
10140 
10141 /*===========================================================================
10142  * FUNCTION   : process_capture_request
10143  *
10144  * DESCRIPTION:
10145  *
10146  * PARAMETERS :
10147  *
10148  *
10149  * RETURN     :
10150  *==========================================================================*/
process_capture_request(const struct camera3_device * device,camera3_capture_request_t * request)10151 int QCamera3HardwareInterface::process_capture_request(
10152                     const struct camera3_device *device,
10153                     camera3_capture_request_t *request)
10154 {
10155     LOGD("E");
10156     QCamera3HardwareInterface *hw =
10157         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10158     if (!hw) {
10159         LOGE("NULL camera device");
10160         return -EINVAL;
10161     }
10162 
10163     int rc = hw->processCaptureRequest(request);
10164     LOGD("X");
10165     return rc;
10166 }
10167 
10168 /*===========================================================================
10169  * FUNCTION   : dump
10170  *
10171  * DESCRIPTION:
10172  *
10173  * PARAMETERS :
10174  *
10175  *
10176  * RETURN     :
10177  *==========================================================================*/
10178 
dump(const struct camera3_device * device,int fd)10179 void QCamera3HardwareInterface::dump(
10180                 const struct camera3_device *device, int fd)
10181 {
10182     /* Log level property is read when "adb shell dumpsys media.camera" is
10183        called so that the log level can be controlled without restarting
10184        the media server */
10185     getLogLevel();
10186 
10187     LOGD("E");
10188     QCamera3HardwareInterface *hw =
10189         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10190     if (!hw) {
10191         LOGE("NULL camera device");
10192         return;
10193     }
10194 
10195     hw->dump(fd);
10196     LOGD("X");
10197     return;
10198 }
10199 
10200 /*===========================================================================
10201  * FUNCTION   : flush
10202  *
10203  * DESCRIPTION:
10204  *
10205  * PARAMETERS :
10206  *
10207  *
10208  * RETURN     :
10209  *==========================================================================*/
10210 
flush(const struct camera3_device * device)10211 int QCamera3HardwareInterface::flush(
10212                 const struct camera3_device *device)
10213 {
10214     int rc;
10215     LOGD("E");
10216     QCamera3HardwareInterface *hw =
10217         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10218     if (!hw) {
10219         LOGE("NULL camera device");
10220         return -EINVAL;
10221     }
10222 
10223     pthread_mutex_lock(&hw->mMutex);
10224     // Validate current state
10225     switch (hw->mState) {
10226         case STARTED:
10227             /* valid state */
10228             break;
10229 
10230         case ERROR:
10231             pthread_mutex_unlock(&hw->mMutex);
10232             hw->handleCameraDeviceError();
10233             return -ENODEV;
10234 
10235         default:
10236             LOGI("Flush returned during state %d", hw->mState);
10237             pthread_mutex_unlock(&hw->mMutex);
10238             return 0;
10239     }
10240     pthread_mutex_unlock(&hw->mMutex);
10241 
10242     rc = hw->flush(true /* restart channels */ );
10243     LOGD("X");
10244     return rc;
10245 }
10246 
10247 /*===========================================================================
10248  * FUNCTION   : close_camera_device
10249  *
10250  * DESCRIPTION:
10251  *
10252  * PARAMETERS :
10253  *
10254  *
10255  * RETURN     :
10256  *==========================================================================*/
close_camera_device(struct hw_device_t * device)10257 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
10258 {
10259     int ret = NO_ERROR;
10260     QCamera3HardwareInterface *hw =
10261         reinterpret_cast<QCamera3HardwareInterface *>(
10262             reinterpret_cast<camera3_device_t *>(device)->priv);
10263     if (!hw) {
10264         LOGE("NULL camera device");
10265         return BAD_VALUE;
10266     }
10267 
10268     LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
10269     delete hw;
10270     LOGI("[KPI Perf]: X");
10271     return ret;
10272 }
10273 
10274 /*===========================================================================
10275  * FUNCTION   : getWaveletDenoiseProcessPlate
10276  *
10277  * DESCRIPTION: query wavelet denoise process plate
10278  *
10279  * PARAMETERS : None
10280  *
 * RETURN     : WNR process plate value
10282  *==========================================================================*/
getWaveletDenoiseProcessPlate()10283 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
10284 {
10285     char prop[PROPERTY_VALUE_MAX];
10286     memset(prop, 0, sizeof(prop));
10287     property_get("persist.denoise.process.plates", prop, "0");
10288     int processPlate = atoi(prop);
10289     switch(processPlate) {
10290     case 0:
10291         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10292     case 1:
10293         return CAM_WAVELET_DENOISE_CBCR_ONLY;
10294     case 2:
10295         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10296     case 3:
10297         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10298     default:
10299         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10300     }
10301 }
10302 
10303 
10304 /*===========================================================================
10305  * FUNCTION   : getTemporalDenoiseProcessPlate
10306  *
10307  * DESCRIPTION: query temporal denoise process plate
10308  *
10309  * PARAMETERS : None
10310  *
 * RETURN     : TNR process plate value
10312  *==========================================================================*/
getTemporalDenoiseProcessPlate()10313 cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
10314 {
10315     char prop[PROPERTY_VALUE_MAX];
10316     memset(prop, 0, sizeof(prop));
10317     property_get("persist.tnr.process.plates", prop, "0");
10318     int processPlate = atoi(prop);
10319     switch(processPlate) {
10320     case 0:
10321         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10322     case 1:
10323         return CAM_WAVELET_DENOISE_CBCR_ONLY;
10324     case 2:
10325         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10326     case 3:
10327         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10328     default:
10329         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10330     }
10331 }
10332 
10333 
10334 /*===========================================================================
10335  * FUNCTION   : extractSceneMode
10336  *
10337  * DESCRIPTION: Extract scene mode from frameworks set metadata
10338  *
10339  * PARAMETERS :
10340  *      @frame_settings: CameraMetadata reference
 *      @metaMode: ANDROID_CONTROL_MODE
10342  *      @hal_metadata: hal metadata structure
10343  *
10344  * RETURN     : None
10345  *==========================================================================*/
extractSceneMode(const CameraMetadata & frame_settings,uint8_t metaMode,metadata_buffer_t * hal_metadata)10346 int32_t QCamera3HardwareInterface::extractSceneMode(
10347         const CameraMetadata &frame_settings, uint8_t metaMode,
10348         metadata_buffer_t *hal_metadata)
10349 {
10350     int32_t rc = NO_ERROR;
10351 
10352     if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
10353         camera_metadata_ro_entry entry =
10354                 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
10355         if (0 == entry.count)
10356             return rc;
10357 
10358         uint8_t fwk_sceneMode = entry.data.u8[0];
10359 
10360         int val = lookupHalName(SCENE_MODES_MAP,
10361                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
10362                 fwk_sceneMode);
10363         if (NAME_NOT_FOUND != val) {
10364             uint8_t sceneMode = (uint8_t)val;
10365             LOGD("sceneMode: %d", sceneMode);
10366             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10367                     CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10368                 rc = BAD_VALUE;
10369             }
10370         }
10371     } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
10372             (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
10373         uint8_t sceneMode = CAM_SCENE_MODE_OFF;
10374         LOGD("sceneMode: %d", sceneMode);
10375         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10376                 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10377             rc = BAD_VALUE;
10378         }
10379     }
10380     return rc;
10381 }
10382 
10383 /*===========================================================================
10384  * FUNCTION   : needRotationReprocess
10385  *
10386  * DESCRIPTION: if rotation needs to be done by reprocess in pp
10387  *
10388  * PARAMETERS : none
10389  *
10390  * RETURN     : true: needed
10391  *              false: no need
10392  *==========================================================================*/
needRotationReprocess()10393 bool QCamera3HardwareInterface::needRotationReprocess()
10394 {
10395     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
10396         // current rotation is not zero, and pp has the capability to process rotation
10397         LOGH("need do reprocess for rotation");
10398         return true;
10399     }
10400 
10401     return false;
10402 }
10403 
10404 /*===========================================================================
10405  * FUNCTION   : needReprocess
10406  *
10407  * DESCRIPTION: if reprocess in needed
10408  *
10409  * PARAMETERS : none
10410  *
10411  * RETURN     : true: needed
10412  *              false: no need
10413  *==========================================================================*/
needReprocess(cam_feature_mask_t postprocess_mask)10414 bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
10415 {
10416     if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
10417         // TODO: add for ZSL HDR later
10418         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
10419         if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
10420             LOGH("need do reprocess for ZSL WNR or min PP reprocess");
10421             return true;
10422         } else {
10423             LOGH("already post processed frame");
10424             return false;
10425         }
10426     }
10427     return needRotationReprocess();
10428 }
10429 
10430 /*===========================================================================
10431  * FUNCTION   : needJpegExifRotation
10432  *
10433  * DESCRIPTION: if rotation from jpeg is needed
10434  *
10435  * PARAMETERS : none
10436  *
10437  * RETURN     : true: needed
10438  *              false: no need
10439  *==========================================================================*/
needJpegExifRotation()10440 bool QCamera3HardwareInterface::needJpegExifRotation()
10441 {
10442    /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
10443     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
10444        LOGD("Need use Jpeg EXIF Rotation");
10445        return true;
10446     }
10447     return false;
10448 }
10449 
10450 /*===========================================================================
10451  * FUNCTION   : addOfflineReprocChannel
10452  *
10453  * DESCRIPTION: add a reprocess channel that will do reprocess on frames
10454  *              coming from input channel
10455  *
10456  * PARAMETERS :
10457  *   @config  : reprocess configuration
10458  *   @inputChHandle : pointer to the input (source) channel
10459  *
10460  *
10461  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
10462  *==========================================================================*/
addOfflineReprocChannel(const reprocess_config_t & config,QCamera3ProcessingChannel * inputChHandle)10463 QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
10464         const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
10465 {
10466     int32_t rc = NO_ERROR;
10467     QCamera3ReprocessChannel *pChannel = NULL;
10468 
10469     pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
10470             mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
10471             config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
10472     if (NULL == pChannel) {
10473         LOGE("no mem for reprocess channel");
10474         return NULL;
10475     }
10476 
10477     rc = pChannel->initialize(IS_TYPE_NONE);
10478     if (rc != NO_ERROR) {
10479         LOGE("init reprocess channel failed, ret = %d", rc);
10480         delete pChannel;
10481         return NULL;
10482     }
10483 
10484     // pp feature config
10485     cam_pp_feature_config_t pp_config;
10486     memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
10487 
10488     pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
10489     if (gCamCapability[mCameraId]->qcom_supported_feature_mask
10490             & CAM_QCOM_FEATURE_DSDN) {
10491         //Use CPP CDS incase h/w supports it.
10492         pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
10493         pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
10494     }
10495     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
10496         pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
10497     }
10498 
10499     rc = pChannel->addReprocStreamsFromSource(pp_config,
10500             config,
10501             IS_TYPE_NONE,
10502             mMetadataChannel);
10503 
10504     if (rc != NO_ERROR) {
10505         delete pChannel;
10506         return NULL;
10507     }
10508     return pChannel;
10509 }
10510 
10511 /*===========================================================================
10512  * FUNCTION   : getMobicatMask
10513  *
10514  * DESCRIPTION: returns mobicat mask
10515  *
10516  * PARAMETERS : none
10517  *
10518  * RETURN     : mobicat mask
10519  *
10520  *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Plain accessor for the mobicat enable mask stored by setMobicat().
    return m_MobicatMask;
}
10525 
10526 /*===========================================================================
10527  * FUNCTION   : setMobicat
10528  *
10529  * DESCRIPTION: set Mobicat on/off.
10530  *
10531  * PARAMETERS :
10532  *   @params  : none
10533  *
10534  * RETURN     : int32_t type of status
10535  *              NO_ERROR  -- success
10536  *              none-zero failure code
10537  *==========================================================================*/
setMobicat()10538 int32_t QCamera3HardwareInterface::setMobicat()
10539 {
10540     char value [PROPERTY_VALUE_MAX];
10541     property_get("persist.camera.mobicat", value, "0");
10542     int32_t ret = NO_ERROR;
10543     uint8_t enableMobi = (uint8_t)atoi(value);
10544 
10545     if (enableMobi) {
10546         tune_cmd_t tune_cmd;
10547         tune_cmd.type = SET_RELOAD_CHROMATIX;
10548         tune_cmd.module = MODULE_ALL;
10549         tune_cmd.value = TRUE;
10550         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10551                 CAM_INTF_PARM_SET_VFE_COMMAND,
10552                 tune_cmd);
10553 
10554         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10555                 CAM_INTF_PARM_SET_PP_COMMAND,
10556                 tune_cmd);
10557     }
10558     m_MobicatMask = enableMobi;
10559 
10560     return ret;
10561 }
10562 
10563 /*===========================================================================
10564 * FUNCTION   : getLogLevel
10565 *
10566 * DESCRIPTION: Reads the log level property into a variable
10567 *
10568 * PARAMETERS :
10569 *   None
10570 *
10571 * RETURN     :
10572 *   None
10573 *==========================================================================*/
getLogLevel()10574 void QCamera3HardwareInterface::getLogLevel()
10575 {
10576     char prop[PROPERTY_VALUE_MAX];
10577     uint32_t globalLogLevel = 0;
10578 
10579     property_get("persist.camera.hal.debug", prop, "0");
10580     int val = atoi(prop);
10581     if (0 <= val) {
10582         gCamHal3LogLevel = (uint32_t)val;
10583     }
10584 
10585     property_get("persist.camera.kpi.debug", prop, "1");
10586     gKpiDebugLevel = atoi(prop);
10587 
10588     property_get("persist.camera.global.debug", prop, "0");
10589     val = atoi(prop);
10590     if (0 <= val) {
10591         globalLogLevel = (uint32_t)val;
10592     }
10593 
10594     /* Highest log level among hal.logs and global.logs is selected */
10595     if (gCamHal3LogLevel < globalLogLevel)
10596         gCamHal3LogLevel = globalLogLevel;
10597 
10598     return;
10599 }
10600 
10601 /*===========================================================================
10602  * FUNCTION   : validateStreamRotations
10603  *
10604  * DESCRIPTION: Check if the rotations requested are supported
10605  *
10606  * PARAMETERS :
10607  *   @stream_list : streams to be configured
10608  *
10609  * RETURN     : NO_ERROR on success
10610  *              -EINVAL on failure
10611  *
10612  *==========================================================================*/
validateStreamRotations(camera3_stream_configuration_t * streamList)10613 int QCamera3HardwareInterface::validateStreamRotations(
10614         camera3_stream_configuration_t *streamList)
10615 {
10616     int rc = NO_ERROR;
10617 
10618     /*
10619     * Loop through all streams requested in configuration
10620     * Check if unsupported rotations have been requested on any of them
10621     */
10622     for (size_t j = 0; j < streamList->num_streams; j++){
10623         camera3_stream_t *newStream = streamList->streams[j];
10624 
10625         bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
10626         bool isImplDef = (newStream->format ==
10627                 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
10628         bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
10629                 isImplDef);
10630 
10631         if (isRotated && (!isImplDef || isZsl)) {
10632             LOGE("Error: Unsupported rotation of %d requested for stream"
10633                     "type:%d and stream format:%d",
10634                     newStream->rotation, newStream->stream_type,
10635                     newStream->format);
10636             rc = -EINVAL;
10637             break;
10638         }
10639     }
10640 
10641     return rc;
10642 }
10643 
10644 /*===========================================================================
10645 * FUNCTION   : getFlashInfo
10646 *
10647 * DESCRIPTION: Retrieve information about whether the device has a flash.
10648 *
10649 * PARAMETERS :
10650 *   @cameraId  : Camera id to query
10651 *   @hasFlash  : Boolean indicating whether there is a flash device
10652 *                associated with given camera
10653 *   @flashNode : If a flash device exists, this will be its device node.
10654 *
10655 * RETURN     :
10656 *   None
10657 *==========================================================================*/
getFlashInfo(const int cameraId,bool & hasFlash,char (& flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])10658 void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
10659         bool& hasFlash,
10660         char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
10661 {
10662     cam_capability_t* camCapability = gCamCapability[cameraId];
10663     if (NULL == camCapability) {
10664         hasFlash = false;
10665         flashNode[0] = '\0';
10666     } else {
10667         hasFlash = camCapability->flash_available;
10668         strlcpy(flashNode,
10669                 (char*)camCapability->flash_dev_name,
10670                 QCAMERA_MAX_FILEPATH_LENGTH);
10671     }
10672 }
10673 
10674 /*===========================================================================
10675 * FUNCTION   : getEepromVersionInfo
10676 *
10677 * DESCRIPTION: Retrieve version info of the sensor EEPROM data
10678 *
10679 * PARAMETERS : None
10680 *
10681 * RETURN     : string describing EEPROM version
10682 *              "\0" if no such info available
10683 *==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Points into the capability table's EEPROM version buffer; per the
    // header comment above, the string is "\0" when no info is available.
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
10688 
10689 /*===========================================================================
10690 * FUNCTION   : getLdafCalib
10691 *
10692 * DESCRIPTION: Retrieve Laser AF calibration data
10693 *
10694 * PARAMETERS : None
10695 *
10696 * RETURN     : Two uint32_t describing laser AF calibration data
10697 *              NULL if none is available.
10698 *==========================================================================*/
getLdafCalib()10699 const uint32_t *QCamera3HardwareInterface::getLdafCalib()
10700 {
10701     if (mLdafCalibExist) {
10702         return &mLdafCalib[0];
10703     } else {
10704         return NULL;
10705     }
10706 }
10707 
10708 /*===========================================================================
10709  * FUNCTION   : dynamicUpdateMetaStreamInfo
10710  *
10711  * DESCRIPTION: This function:
10712  *             (1) stops all the channels
10713  *             (2) returns error on pending requests and buffers
10714  *             (3) sends metastream_info in setparams
10715  *             (4) starts all channels
10716  *             This is useful when sensor has to be restarted to apply any
10717  *             settings such as frame rate from a different sensor mode
10718  *
10719  * PARAMETERS : None
10720  *
10721  * RETURN     : NO_ERROR on success
10722  *              Error codes on failure
10723  *
10724  *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CALL();
    int rc = NO_ERROR;

    LOGD("E");

    // Stream off every channel before re-sending the stream info, so the
    // sensor can restart in the new mode.
    rc = stopAllChannels();
    if (rc < 0) {
        LOGE("stopAllChannels failed");
        return rc;
    }

    // Return all in-flight requests/buffers to the framework as errors;
    // nothing may straddle the restart.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        return rc;
    }

    // Debug dump of the stream configuration about to be re-sent.
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
                "Format:%d",
                mStreamConfigInfo.type[i],
                mStreamConfigInfo.stream_sizes[i].width,
                mStreamConfigInfo.stream_sizes[i].height,
                mStreamConfigInfo.postprocess_mask[i],
                mStreamConfigInfo.format[i]);
    }

    /* Send meta stream info once again so that ISP can start */
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    // Non-fatal: channels are restarted regardless; the sensor simply
    // keeps its previous mode if set_parms failed.
    if (rc < 0) {
        LOGE("set Metastreaminfo failed. Sensor mode does not change");
    }

    rc = startAllChannels();
    if (rc < 0) {
        LOGE("startAllChannels failed");
        return rc;
    }

    LOGD("X");
    return rc;
}
10772 
10773 /*===========================================================================
10774  * FUNCTION   : stopAllChannels
10775  *
10776  * DESCRIPTION: This function stops (equivalent to stream-off) all channels
10777  *
10778  * PARAMETERS : None
10779  *
10780  * RETURN     : NO_ERROR on success
10781  *              Error codes on failure
10782  *
10783  *==========================================================================*/
stopAllChannels()10784 int32_t QCamera3HardwareInterface::stopAllChannels()
10785 {
10786     int32_t rc = NO_ERROR;
10787 
10788     LOGD("Stopping all channels");
10789     // Stop the Streams/Channels
10790     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10791         it != mStreamInfo.end(); it++) {
10792         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10793         if (channel) {
10794             channel->stop();
10795         }
10796         (*it)->status = INVALID;
10797     }
10798 
10799     if (mSupportChannel) {
10800         mSupportChannel->stop();
10801     }
10802     if (mAnalysisChannel) {
10803         mAnalysisChannel->stop();
10804     }
10805     if (mRawDumpChannel) {
10806         mRawDumpChannel->stop();
10807     }
10808     if (mMetadataChannel) {
10809         /* If content of mStreamInfo is not 0, there is metadata stream */
10810         mMetadataChannel->stop();
10811     }
10812 
10813     LOGD("All channels stopped");
10814     return rc;
10815 }
10816 
10817 /*===========================================================================
10818  * FUNCTION   : startAllChannels
10819  *
10820  * DESCRIPTION: This function starts (equivalent to stream-on) all channels
10821  *
10822  * PARAMETERS : None
10823  *
10824  * RETURN     : NO_ERROR on success
10825  *              Error codes on failure
10826  *
10827  *==========================================================================*/
startAllChannels()10828 int32_t QCamera3HardwareInterface::startAllChannels()
10829 {
10830     int32_t rc = NO_ERROR;
10831 
10832     LOGD("Start all channels ");
10833     // Start the Streams/Channels
10834     if (mMetadataChannel) {
10835         /* If content of mStreamInfo is not 0, there is metadata stream */
10836         rc = mMetadataChannel->start();
10837         if (rc < 0) {
10838             LOGE("META channel start failed");
10839             return rc;
10840         }
10841     }
10842     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10843         it != mStreamInfo.end(); it++) {
10844         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10845         if (channel) {
10846             rc = channel->start();
10847             if (rc < 0) {
10848                 LOGE("channel start failed");
10849                 return rc;
10850             }
10851         }
10852     }
10853     if (mAnalysisChannel) {
10854         mAnalysisChannel->start();
10855     }
10856     if (mSupportChannel) {
10857         rc = mSupportChannel->start();
10858         if (rc < 0) {
10859             LOGE("Support channel start failed");
10860             return rc;
10861         }
10862     }
10863     if (mRawDumpChannel) {
10864         rc = mRawDumpChannel->start();
10865         if (rc < 0) {
10866             LOGE("RAW dump channel start failed");
10867             return rc;
10868         }
10869     }
10870 
10871     LOGD("All channels started");
10872     return rc;
10873 }
10874 
10875 /*===========================================================================
10876  * FUNCTION   : notifyErrorForPendingRequests
10877  *
10878  * DESCRIPTION: This function sends error for all the pending requests/buffers
10879  *
10880  * PARAMETERS : None
10881  *
10882  * RETURN     : Error codes
10883  *              NO_ERROR on success
10884  *
10885  *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // Use the oldest pending request's frame number as the split point:
    // buffers with smaller frame numbers already had their metadata sent
    // and get ERROR_BUFFER; the rest get ERROR_REQUEST.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
       frameNum);

    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                req->frame_number, req->mPendingBufferList.size());

            // NOTE(review): plain operator new[] throws std::bad_alloc (or
            // aborts under -fno-exceptions) rather than returning NULL, so
            // this check is likely dead code; new (std::nothrow) would make
            // it meaningful — confirm the build flags before changing.
            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            // One ERROR_BUFFER notify per pending buffer; each buffer is
            // also returned with CAMERA3_BUFFER_STATUS_ERROR in the single
            // capture result sent after the loop.
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {

                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);

            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            LOGE("Sending ERROR REQUEST for all pending requests");
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

            LOGE("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            // NOTE(review): same dead NULL-check concern as the ERROR_BUFFER
            // branch above.
            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            result.result = NULL;
            result.frame_number = req->frame_number;
            // NOTE(review): assumes the head of mPendingRequestsList is the
            // request matching req->frame_number — verify that both lists
            // stay in lockstep here.
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);
            delete [] pStream_Buf;
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    // The loop above normally drains both containers; this sweep is a
    // defensive cleanup so no stale tracking state survives.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
11024 
isOnEncoder(const cam_dimension_t max_viewfinder_size,uint32_t width,uint32_t height)11025 bool QCamera3HardwareInterface::isOnEncoder(
11026         const cam_dimension_t max_viewfinder_size,
11027         uint32_t width, uint32_t height)
11028 {
11029     return (width > (uint32_t)max_viewfinder_size.width ||
11030             height > (uint32_t)max_viewfinder_size.height);
11031 }
11032 
11033 /*===========================================================================
11034  * FUNCTION   : setBundleInfo
11035  *
11036  * DESCRIPTION: Set bundle info for all streams that are bundled.
11037  *
11038  * PARAMETERS : None
11039  *
11040  * RETURN     : NO_ERROR on success
11041  *              Error codes on failure
11042  *==========================================================================*/
setBundleInfo()11043 int32_t QCamera3HardwareInterface::setBundleInfo()
11044 {
11045     int32_t rc = NO_ERROR;
11046 
11047     if (mChannelHandle) {
11048         cam_bundle_config_t bundleInfo;
11049         memset(&bundleInfo, 0, sizeof(bundleInfo));
11050         rc = mCameraHandle->ops->get_bundle_info(
11051                 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
11052         if (rc != NO_ERROR) {
11053             LOGE("get_bundle_info failed");
11054             return rc;
11055         }
11056         if (mAnalysisChannel) {
11057             mAnalysisChannel->setBundleInfo(bundleInfo);
11058         }
11059         if (mSupportChannel) {
11060             mSupportChannel->setBundleInfo(bundleInfo);
11061         }
11062         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
11063                 it != mStreamInfo.end(); it++) {
11064             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
11065             channel->setBundleInfo(bundleInfo);
11066         }
11067         if (mRawDumpChannel) {
11068             mRawDumpChannel->setBundleInfo(bundleInfo);
11069         }
11070     }
11071 
11072     return rc;
11073 }
11074 
11075 /*===========================================================================
11076  * FUNCTION   : get_num_overall_buffers
11077  *
11078  * DESCRIPTION: Compute the total number of pending buffers across all requests.
11079  *
11080  * PARAMETERS : None
11081  *
11082  * RETURN     : Number of overall pending buffers
11083  *
11084  *==========================================================================*/
get_num_overall_buffers()11085 uint32_t PendingBuffersMap::get_num_overall_buffers()
11086 {
11087     uint32_t sum_buffers = 0;
11088     for (auto &req : mPendingBuffersInRequest) {
11089         sum_buffers += req.mPendingBufferList.size();
11090     }
11091     return sum_buffers;
11092 }
11093 
11094 /*===========================================================================
11095  * FUNCTION   : removeBuf
11096  *
11097  * DESCRIPTION: Remove a matching buffer from tracker.
11098  *
11099  * PARAMETERS : @buffer: image buffer for the callback
11100  *
11101  * RETURN     : None
11102  *
11103  *==========================================================================*/
removeBuf(buffer_handle_t * buffer)11104 void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
11105 {
11106     bool buffer_found = false;
11107     for (auto req = mPendingBuffersInRequest.begin();
11108             req != mPendingBuffersInRequest.end(); req++) {
11109         for (auto k = req->mPendingBufferList.begin();
11110                 k != req->mPendingBufferList.end(); k++ ) {
11111             if (k->buffer == buffer) {
11112                 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
11113                         req->frame_number, buffer);
11114                 k = req->mPendingBufferList.erase(k);
11115                 if (req->mPendingBufferList.empty()) {
11116                     // Remove this request from Map
11117                     req = mPendingBuffersInRequest.erase(req);
11118                 }
11119                 buffer_found = true;
11120                 break;
11121             }
11122         }
11123         if (buffer_found) {
11124             break;
11125         }
11126     }
11127     LOGD("mPendingBuffersMap.num_overall_buffers = %d",
11128             get_num_overall_buffers());
11129 }
11130 
11131 /*===========================================================================
11132  * FUNCTION   : getBufErrStatus
11133  *
11134  * DESCRIPTION: get buffer error status
11135  *
11136  * PARAMETERS : @buffer: buffer handle
11137  *
11138  * RETURN     : Buffer error status; CAMERA3_BUFFER_STATUS_OK if the buffer is not tracked
11139  *
11140  *==========================================================================*/
getBufErrStatus(buffer_handle_t * buffer)11141 int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
11142 {
11143     for (auto& req : mPendingBuffersInRequest) {
11144         for (auto& k : req.mPendingBufferList) {
11145             if (k.buffer == buffer)
11146                 return k.bufStatus;
11147         }
11148     }
11149     return CAMERA3_BUFFER_STATUS_OK;
11150 }
11151 
11152 /*===========================================================================
11153  * FUNCTION   : setPAAFSupport
11154  *
11155  * DESCRIPTION: Set the preview-assisted auto focus support bit in
11156  *              feature mask according to stream type and filter
11157  *              arrangement
11158  *
11159  * PARAMETERS : @feature_mask: current feature mask, which may be modified
11160  *              @stream_type: stream type
11161  *              @filter_arrangement: filter arrangement
11162  *
11163  * RETURN     : None
11164  *==========================================================================*/
setPAAFSupport(cam_feature_mask_t & feature_mask,cam_stream_type_t stream_type,cam_color_filter_arrangement_t filter_arrangement)11165 void QCamera3HardwareInterface::setPAAFSupport(
11166         cam_feature_mask_t& feature_mask,
11167         cam_stream_type_t stream_type,
11168         cam_color_filter_arrangement_t filter_arrangement)
11169 {
11170     switch (filter_arrangement) {
11171     case CAM_FILTER_ARRANGEMENT_RGGB:
11172     case CAM_FILTER_ARRANGEMENT_GRBG:
11173     case CAM_FILTER_ARRANGEMENT_GBRG:
11174     case CAM_FILTER_ARRANGEMENT_BGGR:
11175         if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
11176                 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
11177             feature_mask |= CAM_QCOM_FEATURE_PAAF;
11178         }
11179         break;
11180     case CAM_FILTER_ARRANGEMENT_Y:
11181         if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
11182             feature_mask |= CAM_QCOM_FEATURE_PAAF;
11183         }
11184         break;
11185     default:
11186         break;
11187     }
11188     LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
11189             feature_mask, stream_type, filter_arrangement);
11190 
11191 
11192 }
11193 
11194 /*===========================================================================
11195  * FUNCTION   : adjustBlackLevelForCFA
11196  *
11197  * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
11198  *              of bayer CFA (Color Filter Array).
11199  *
11200  * PARAMETERS : @input: black level pattern in the order of RGGB
11201  *              @output: black level pattern in the order of CFA
11202  *              @color_arrangement: CFA color arrangement
11203  *
11204  * RETURN     : None
11205  *==========================================================================*/
11206 template<typename T>
adjustBlackLevelForCFA(T input[BLACK_LEVEL_PATTERN_CNT],T output[BLACK_LEVEL_PATTERN_CNT],cam_color_filter_arrangement_t color_arrangement)11207 void QCamera3HardwareInterface::adjustBlackLevelForCFA(
11208         T input[BLACK_LEVEL_PATTERN_CNT],
11209         T output[BLACK_LEVEL_PATTERN_CNT],
11210         cam_color_filter_arrangement_t color_arrangement)
11211 {
11212     switch (color_arrangement) {
11213     case CAM_FILTER_ARRANGEMENT_GRBG:
11214         output[0] = input[1];
11215         output[1] = input[0];
11216         output[2] = input[3];
11217         output[3] = input[2];
11218         break;
11219     case CAM_FILTER_ARRANGEMENT_GBRG:
11220         output[0] = input[2];
11221         output[1] = input[3];
11222         output[2] = input[0];
11223         output[3] = input[1];
11224         break;
11225     case CAM_FILTER_ARRANGEMENT_BGGR:
11226         output[0] = input[3];
11227         output[1] = input[2];
11228         output[2] = input[1];
11229         output[3] = input[0];
11230         break;
11231     case CAM_FILTER_ARRANGEMENT_RGGB:
11232         output[0] = input[0];
11233         output[1] = input[1];
11234         output[2] = input[2];
11235         output[3] = input[3];
11236         break;
11237     default:
11238         LOGE("Invalid color arrangement to derive dynamic blacklevel");
11239         break;
11240     }
11241 }
11242 
11243 /*===========================================================================
11244  * FUNCTION   : is60HzZone
11245  *
11246  * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
11247  *
11248  * PARAMETERS : None
11249  *
11250  * RETURN     : True if in 60Hz zone, False otherwise
11251  *==========================================================================*/
is60HzZone()11252 bool QCamera3HardwareInterface::is60HzZone()
11253 {
11254     time_t t = time(NULL);
11255     struct tm lt;
11256 
11257     struct tm* r = localtime_r(&t, &lt);
11258 
11259     if (r == NULL || lt.tm_gmtoff <=  -2*60*60 || lt.tm_gmtoff >= 8*60*60)
11260         return true;
11261     else
11262         return false;
11263 }
11264 }; //end namespace qcamera
11265