• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 *     * Redistributions of source code must retain the above copyright
7 *       notice, this list of conditions and the following disclaimer.
8 *     * Redistributions in binary form must reproduce the above
9 *       copyright notice, this list of conditions and the following
10 *       disclaimer in the documentation and/or other materials provided
11 *       with the distribution.
12 *     * Neither the name of The Linux Foundation nor the names of its
13 *       contributors may be used to endorse or promote products derived
14 *       from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29 
30 #define LOG_TAG "QCamera3HWI"
31 //#define LOG_NDEBUG 0
32 
33 #define __STDC_LIMIT_MACROS
34 
35 // To remove
36 #include <cutils/properties.h>
37 
38 // System dependencies
39 #include <dlfcn.h>
40 #include <fcntl.h>
41 #include <stdio.h>
42 #include <stdlib.h>
43 #include <time.h>
44 #include <sync/sync.h>
45 #include "gralloc_priv.h"
46 
47 // Display dependencies
48 #include "qdMetaData.h"
49 
50 // Camera dependencies
51 #include "android/QCamera3External.h"
52 #include "util/QCameraFlash.h"
53 #include "QCamera3HWI.h"
54 #include "QCamera3VendorTags.h"
55 #include "QCameraTrace.h"
56 
57 extern "C" {
58 #include "mm_camera_dbg.h"
59 }
60 
61 using namespace android;
62 
63 namespace qcamera {
64 
// Convenience accessor: fetch the raw buffer pointer at INDEX from a camera
// memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0

// Maximum representable pixel values for common sensor bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD (4K) video dimensions.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for which EIS (electronic image stabilization) is used.
#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

// Per-configuration stream count limits by stream category.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT 3
// Element count of a statically sized mapping table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Post-processing feature mask applied as the HAL3 superset.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

// Sentinel for waits that should block indefinitely.
#define TIMEOUT_NEVER -1

// Per-sensor capability tables and their static-metadata translations,
// shared across HAL instances.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
// Runtime-adjustable HAL3 log verbosity (read by getLogLevel()).
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;
114 
// Maps CDS setting strings ("On"/"Off"/"Auto") to HAL CDS mode enums.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
120 
// Android control-effect modes <-> HAL effect modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
134 
// Android AWB modes <-> HAL white-balance modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
148 
// Android scene modes <-> HAL scene modes.
// Note: STEADYPHOTO intentionally maps to the HAL's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
169 
// Android AF modes <-> HAL focus modes.
// AF_MODE_OFF appears twice (OFF and FIXED) so that HAL->Android translation
// of either HAL mode resolves to ANDROID_CONTROL_AF_MODE_OFF.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
181 
// Android color-correction aberration modes <-> HAL CAC modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};
192 
// Android AE antibanding modes <-> HAL antibanding modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
201 
// Android AE modes -> HAL flash modes (both plain ON and OFF disable flash;
// the two auto-flash variants map to the same HAL AUTO mode).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
211 
// Android flash modes <-> HAL flash modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
219 
// Android face-detect modes <-> HAL face-detect modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
227 
// Android focus-distance calibration quality <-> HAL calibration quality.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};
238 
// Android lens state <-> HAL AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
245 
// Supported JPEG thumbnail sizes as flat (width, height) pairs; the leading
// (0, 0) entry means "no thumbnail", as the camera metadata spec requires.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};
254 
// Android sensor test-pattern modes <-> HAL test patterns.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};
265 
/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
// Android reference illuminants <-> HAL AWB illuminants (many-to-one in both
// directions; see ordering note above).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
291 
// Requested frame rate (fps) -> HAL high-frame-rate mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
303 
// camera3_device_ops vtable handed to the framework; entries this HAL does
// not implement (register_stream_buffers, get_metadata_vendor_tag_ops) are
// deliberately NULL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
315 
// initialise to some default value
// Per-camera session IDs filled in by openCamera() for dual-cam linking;
// 0xDEADBEEF marks an entry that has not been assigned yet.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
318 
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : camera module callbacks used to notify the framework
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      m_perfLock(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pRelCamSyncHeap(NULL),
      m_pRelCamSyncBuf(NULL)
{
    getLogLevel();
    m_perfLock.lock_init();
    mCommon.init(gCamCapability[cameraId]);
    // Fill in the hw_device_t header the framework uses to talk to this HAL.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    // Synchronization primitives for buffer/request bookkeeping.
    pthread_cond_init(&mBuffersCond, NULL);

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);

    //Load and read GPU library.
    // Query the GPU's pixel alignment so stream strides match what the GPU
    // expects; fall back to CAM_PAD_TO_32 if the library is unavailable.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }

    m60HzZone = is60HzZone();
}
448 
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Tears down the
 *              session in a strict order: stop every stream/channel first,
 *              then delete the channel objects, then unconfigure and close
 *              the camera, and finally release bookkeeping and sync objects.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Now that everything is stopped, delete the channel objects and the
    // per-stream bookkeeping entries.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // mPictureChannel is owned via mStreamInfo above; just drop the alias.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drop any requests/buffers still tracked as pending.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
580 
581 /*===========================================================================
582  * FUNCTION   : erasePendingRequest
583  *
584  * DESCRIPTION: function to erase a desired pending request after freeing any
585  *              allocated memory
586  *
587  * PARAMETERS :
588  *   @i       : iterator pointing to pending request to be erased
589  *
590  * RETURN     : iterator pointing to the next request
591  *==========================================================================*/
592 QCamera3HardwareInterface::pendingRequestIterator
erasePendingRequest(pendingRequestIterator i)593         QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
594 {
595     if (i->input_buffer != NULL) {
596         free(i->input_buffer);
597         i->input_buffer = NULL;
598     }
599     if (i->settings != NULL)
600         free_camera_metadata((camera_metadata_t*)i->settings);
601     return mPendingRequestsList.erase(i);
602 }
603 
604 /*===========================================================================
605  * FUNCTION   : camEvtHandle
606  *
607  * DESCRIPTION: Function registered to mm-camera-interface to handle events
608  *
609  * PARAMETERS :
610  *   @camera_handle : interface layer camera handle
611  *   @evt           : ptr to event
612  *   @user_data     : user data ptr
613  *
614  * RETURN     : none
615  *==========================================================================*/
camEvtHandle(uint32_t,mm_camera_event_t * evt,void * user_data)616 void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
617                                           mm_camera_event_t *evt,
618                                           void *user_data)
619 {
620     QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
621     if (obj && evt) {
622         switch(evt->server_event_type) {
623             case CAM_EVENT_TYPE_DAEMON_DIED:
624                 pthread_mutex_lock(&obj->mMutex);
625                 obj->mState = ERROR;
626                 pthread_mutex_unlock(&obj->mMutex);
627                 LOGE("Fatal, camera daemon died");
628                 break;
629 
630             case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
631                 LOGD("HAL got request pull from Daemon");
632                 pthread_mutex_lock(&obj->mMutex);
633                 obj->mWokenUpByDaemon = true;
634                 obj->unblockRequestIfNecessary();
635                 pthread_mutex_unlock(&obj->mMutex);
636                 break;
637 
638             default:
639                 LOGW("Warning: Unhandled event %d",
640                         evt->server_event_type);
641                 break;
642         }
643     } else {
644         LOGE("NULL user_data/evt");
645     }
646 }
647 
648 /*===========================================================================
649  * FUNCTION   : openCamera
650  *
651  * DESCRIPTION: open camera
652  *
653  * PARAMETERS :
654  *   @hw_device  : double ptr for camera device struct
655  *
656  * RETURN     : int32_t type of status
657  *              NO_ERROR  -- success
658  *              none-zero failure code
659  *==========================================================================*/
openCamera(struct hw_device_t ** hw_device)660 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
661 {
662     int rc = 0;
663     if (mState != CLOSED) {
664         *hw_device = NULL;
665         return PERMISSION_DENIED;
666     }
667 
668     m_perfLock.lock_acq();
669     LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
670              mCameraId);
671 
672     rc = openCamera();
673     if (rc == 0) {
674         *hw_device = &mCameraDevice.common;
675     } else
676         *hw_device = NULL;
677 
678     m_perfLock.lock_rel();
679     LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
680              mCameraId, rc);
681 
682     if (rc == NO_ERROR) {
683         mState = OPENED;
684     }
685     return rc;
686 }
687 
688 /*===========================================================================
689  * FUNCTION   : openCamera
690  *
691  * DESCRIPTION: open camera
692  *
693  * PARAMETERS : none
694  *
695  * RETURN     : int32_t type of status
696  *              NO_ERROR  -- success
697  *              none-zero failure code
698  *==========================================================================*/
openCamera()699 int QCamera3HardwareInterface::openCamera()
700 {
701     int rc = 0;
702     char value[PROPERTY_VALUE_MAX];
703 
704     KPI_ATRACE_CALL();
705     if (mCameraHandle) {
706         LOGE("Failure: Camera already opened");
707         return ALREADY_EXISTS;
708     }
709 
710     rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
711     if (rc < 0) {
712         LOGE("Failed to reserve flash for camera id: %d",
713                 mCameraId);
714         return UNKNOWN_ERROR;
715     }
716 
717     rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
718     if (rc) {
719         LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
720         return rc;
721     }
722 
723     if (!mCameraHandle) {
724         LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
725         return -ENODEV;
726     }
727 
728     rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
729             camEvtHandle, (void *)this);
730 
731     if (rc < 0) {
732         LOGE("Error, failed to register event callback");
733         /* Not closing camera here since it is already handled in destructor */
734         return FAILED_TRANSACTION;
735     }
736 
737     mExifParams.debug_params =
738             (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
739     if (mExifParams.debug_params) {
740         memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
741     } else {
742         LOGE("Out of Memory. Allocation failed for 3A debug exif params");
743         return NO_MEMORY;
744     }
745     mFirstConfiguration = true;
746 
747     //Notify display HAL that a camera session is active.
748     //But avoid calling the same during bootup because camera service might open/close
749     //cameras at boot time during its initialization and display service will also internally
750     //wait for camera service to initialize first while calling this display API, resulting in a
751     //deadlock situation. Since boot time camera open/close calls are made only to fetch
752     //capabilities, no need of this display bw optimization.
753     //Use "service.bootanim.exit" property to know boot status.
754     property_get("service.bootanim.exit", value, "0");
755     if (atoi(value) == 1) {
756         pthread_mutex_lock(&gCamLock);
757         if (gNumCameraSessions++ == 0) {
758             setCameraLaunchStatus(true);
759         }
760         pthread_mutex_unlock(&gCamLock);
761     }
762 
763     //fill the session id needed while linking dual cam
764     pthread_mutex_lock(&gCamLock);
765     rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
766         &sessionId[mCameraId]);
767     pthread_mutex_unlock(&gCamLock);
768 
769     if (rc < 0) {
770         LOGE("Error, failed to get sessiion id");
771         return UNKNOWN_ERROR;
772     } else {
773         //Allocate related cam sync buffer
774         //this is needed for the payload that goes along with bundling cmd for related
775         //camera use cases
776         m_pRelCamSyncHeap = new QCamera3HeapMemory(1);
777         rc = m_pRelCamSyncHeap->allocate(sizeof(cam_sync_related_sensors_event_info_t));
778         if(rc != OK) {
779             rc = NO_MEMORY;
780             LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
781             return NO_MEMORY;
782         }
783 
784         //Map memory for related cam sync buffer
785         rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
786                 CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF,
787                 m_pRelCamSyncHeap->getFd(0),
788                 sizeof(cam_sync_related_sensors_event_info_t),
789                 m_pRelCamSyncHeap->getPtr(0));
790         if(rc < 0) {
791             LOGE("Dualcam: failed to map Related cam sync buffer");
792             rc = FAILED_TRANSACTION;
793             return NO_MEMORY;
794         }
795         m_pRelCamSyncBuf =
796                 (cam_sync_related_sensors_event_info_t*) DATA_PTR(m_pRelCamSyncHeap,0);
797     }
798 
799     LOGH("mCameraId=%d",mCameraId);
800 
801     return NO_ERROR;
802 }
803 
804 /*===========================================================================
805  * FUNCTION   : closeCamera
806  *
807  * DESCRIPTION: close camera
808  *
809  * PARAMETERS : none
810  *
811  * RETURN     : int32_t type of status
812  *              NO_ERROR  -- success
813  *              none-zero failure code
814  *==========================================================================*/
closeCamera()815 int QCamera3HardwareInterface::closeCamera()
816 {
817     KPI_ATRACE_CALL();
818     int rc = NO_ERROR;
819     char value[PROPERTY_VALUE_MAX];
820 
821     LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
822              mCameraId);
823     rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
824     mCameraHandle = NULL;
825 
826     //reset session id to some invalid id
827     pthread_mutex_lock(&gCamLock);
828     sessionId[mCameraId] = 0xDEADBEEF;
829     pthread_mutex_unlock(&gCamLock);
830 
831     //Notify display HAL that there is no active camera session
832     //but avoid calling the same during bootup. Refer to openCamera
833     //for more details.
834     property_get("service.bootanim.exit", value, "0");
835     if (atoi(value) == 1) {
836         pthread_mutex_lock(&gCamLock);
837         if (--gNumCameraSessions == 0) {
838             setCameraLaunchStatus(false);
839         }
840         pthread_mutex_unlock(&gCamLock);
841     }
842 
843     if (NULL != m_pRelCamSyncHeap) {
844         m_pRelCamSyncHeap->deallocate();
845         delete m_pRelCamSyncHeap;
846         m_pRelCamSyncHeap = NULL;
847         m_pRelCamSyncBuf = NULL;
848     }
849 
850     if (mExifParams.debug_params) {
851         free(mExifParams.debug_params);
852         mExifParams.debug_params = NULL;
853     }
854     if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
855         LOGW("Failed to release flash for camera id: %d",
856                 mCameraId);
857     }
858     mState = CLOSED;
859     LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
860          mCameraId, rc);
861     return rc;
862 }
863 
864 /*===========================================================================
865  * FUNCTION   : initialize
866  *
867  * DESCRIPTION: Initialize frameworks callback functions
868  *
869  * PARAMETERS :
870  *   @callback_ops : callback function to frameworks
871  *
872  * RETURN     :
873  *
874  *==========================================================================*/
initialize(const struct camera3_callback_ops * callback_ops)875 int QCamera3HardwareInterface::initialize(
876         const struct camera3_callback_ops *callback_ops)
877 {
878     ATRACE_CALL();
879     int rc;
880 
881     LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
882     pthread_mutex_lock(&mMutex);
883 
884     // Validate current state
885     switch (mState) {
886         case OPENED:
887             /* valid state */
888             break;
889         default:
890             LOGE("Invalid state %d", mState);
891             rc = -ENODEV;
892             goto err1;
893     }
894 
895     rc = initParameters();
896     if (rc < 0) {
897         LOGE("initParamters failed %d", rc);
898         goto err1;
899     }
900     mCallbackOps = callback_ops;
901 
902     mChannelHandle = mCameraHandle->ops->add_channel(
903             mCameraHandle->camera_handle, NULL, NULL, this);
904     if (mChannelHandle == 0) {
905         LOGE("add_channel failed");
906         rc = -ENOMEM;
907         pthread_mutex_unlock(&mMutex);
908         return rc;
909     }
910 
911     pthread_mutex_unlock(&mMutex);
912     mCameraInitialized = true;
913     mState = INITIALIZED;
914     LOGI("X");
915     return 0;
916 
917 err1:
918     pthread_mutex_unlock(&mMutex);
919     return rc;
920 }
921 
/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @streamList : streams to be configured
 *
 * RETURN     : NO_ERROR if every requested stream size is supported,
 *              -EINVAL if any size is unsupported or multiple input
 *              streams were requested
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                // Only a single input stream is supported per configuration.
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270-degree rotation the buffer dimensions are swapped, so
        // validate against the pre-rotation (swapped) width/height.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW streams must exactly match an advertised raw dimension.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL-style streams (bidirectional/input/ZSL usage) matching the
            // full active array size are accepted immediately; note this
            // inner break exits the switch, not just the if.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spc if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            // Otherwise fall back to validating against the picture sizes
            // table, same as BLOB streams.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1044 
1045 /*==============================================================================
1046  * FUNCTION   : isSupportChannelNeeded
1047  *
1048  * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1049  *
1050  * PARAMETERS :
1051  *   @stream_list : streams to be configured
1052  *   @stream_config_info : the config info for streams to be configured
1053  *
1054  * RETURN     : Boolen true/false decision
1055  *
1056  *==========================================================================*/
isSupportChannelNeeded(camera3_stream_configuration_t * streamList,cam_stream_size_info_t stream_config_info)1057 bool QCamera3HardwareInterface::isSupportChannelNeeded(
1058         camera3_stream_configuration_t *streamList,
1059         cam_stream_size_info_t stream_config_info)
1060 {
1061     uint32_t i;
1062     bool pprocRequested = false;
1063     /* Check for conditions where PProc pipeline does not have any streams*/
1064     for (i = 0; i < stream_config_info.num_streams; i++) {
1065         if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1066                 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1067             pprocRequested = true;
1068             break;
1069         }
1070     }
1071 
1072     if (pprocRequested == false )
1073         return true;
1074 
1075     /* Dummy stream needed if only raw or jpeg streams present */
1076     for (i = 0; i < streamList->num_streams; i++) {
1077         switch(streamList->streams[i]->format) {
1078             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1079             case HAL_PIXEL_FORMAT_RAW10:
1080             case HAL_PIXEL_FORMAT_RAW16:
1081             case HAL_PIXEL_FORMAT_BLOB:
1082                 break;
1083             default:
1084                 return false;
1085         }
1086     }
1087     return true;
1088 }
1089 
1090 /*==============================================================================
1091  * FUNCTION   : getSensorOutputSize
1092  *
1093  * DESCRIPTION: Get sensor output size based on current stream configuratoin
1094  *
1095  * PARAMETERS :
1096  *   @sensor_dim : sensor output dimension (output)
1097  *
1098  * RETURN     : int32_t type of status
1099  *              NO_ERROR  -- success
1100  *              none-zero failure code
1101  *
1102  *==========================================================================*/
getSensorOutputSize(cam_dimension_t & sensor_dim)1103 int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
1104 {
1105     int32_t rc = NO_ERROR;
1106 
1107     cam_dimension_t max_dim = {0, 0};
1108     for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1109         if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1110             max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1111         if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1112             max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1113     }
1114 
1115     clear_metadata_buffer(mParameters);
1116 
1117     rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1118             max_dim);
1119     if (rc != NO_ERROR) {
1120         LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1121         return rc;
1122     }
1123 
1124     rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1125     if (rc != NO_ERROR) {
1126         LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1127         return rc;
1128     }
1129 
1130     clear_metadata_buffer(mParameters);
1131     ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1132 
1133     rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1134             mParameters);
1135     if (rc != NO_ERROR) {
1136         LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
1137         return rc;
1138     }
1139 
1140     READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1141     LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
1142 
1143     return rc;
1144 }
1145 
1146 /*==============================================================================
1147  * FUNCTION   : enablePowerHint
1148  *
1149  * DESCRIPTION: enable single powerhint for preview and different video modes.
1150  *
1151  * PARAMETERS :
1152  *
1153  * RETURN     : NULL
1154  *
1155  *==========================================================================*/
enablePowerHint()1156 void QCamera3HardwareInterface::enablePowerHint()
1157 {
1158     if (!mPowerHintEnabled) {
1159         m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
1160         mPowerHintEnabled = true;
1161     }
1162 }
1163 
1164 /*==============================================================================
1165  * FUNCTION   : disablePowerHint
1166  *
1167  * DESCRIPTION: disable current powerhint.
1168  *
1169  * PARAMETERS :
1170  *
1171  * RETURN     : NULL
1172  *
1173  *==========================================================================*/
disablePowerHint()1174 void QCamera3HardwareInterface::disablePowerHint()
1175 {
1176     if (mPowerHintEnabled) {
1177         m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
1178         mPowerHintEnabled = false;
1179     }
1180 }
1181 
/*==============================================================================
 * FUNCTION   : addToPPFeatureMask
 *
 * DESCRIPTION: add additional features to pp feature mask based on
 *              stream type and usecase
 *
 * PARAMETERS :
 *   @stream_format : stream type for feature mask
 *   @stream_idx : stream idx within postprocess_mask list to change
 *
 * RETURN     : NULL
 *
 *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
    // Accept either a "0x"-prefixed hex value or a plain decimal value.
    // NOTE(review): "%llx"/"%lld" assume cam_feature_mask_t is the width of
    // (unsigned) long long — TODO confirm against the cam_types definition.
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    if (1 != args_converted) {
        // Unparseable property: treat as no extra features requested.
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        // SW TNR takes priority over LLVD (SeeMore) when both bits are set.
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        break;
    }
    default:
        // Other formats: no additional postprocessing features are added.
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
1238 
1239 /*==============================================================================
1240  * FUNCTION   : updateFpsInPreviewBuffer
1241  *
1242  * DESCRIPTION: update FPS information in preview buffer.
1243  *
1244  * PARAMETERS :
1245  *   @metadata    : pointer to metadata buffer
1246  *   @frame_number: frame_number to look for in pending buffer list
1247  *
1248  * RETURN     : None
1249  *
1250  *==========================================================================*/
updateFpsInPreviewBuffer(metadata_buffer_t * metadata,uint32_t frame_number)1251 void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1252         uint32_t frame_number)
1253 {
1254     // Mark all pending buffers for this particular request
1255     // with corresponding framerate information
1256     for (List<PendingBuffersInRequest>::iterator req =
1257             mPendingBuffersMap.mPendingBuffersInRequest.begin();
1258             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1259         for(List<PendingBufferInfo>::iterator j =
1260                 req->mPendingBufferList.begin();
1261                 j != req->mPendingBufferList.end(); j++) {
1262             QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1263             if ((req->frame_number == frame_number) &&
1264                 (channel->getStreamTypeMask() &
1265                 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1266                 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1267                     CAM_INTF_PARM_FPS_RANGE, metadata) {
1268                     int32_t cameraFps = float_range->max_fps;
1269                     struct private_handle_t *priv_handle =
1270                         (struct private_handle_t *)(*(j->buffer));
1271                     setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1272                 }
1273             }
1274         }
1275     }
1276 }
1277 
1278 /*==============================================================================
1279  * FUNCTION   : updateTimeStampInPendingBuffers
1280  *
1281  * DESCRIPTION: update timestamp in display metadata for all pending buffers
1282  *              of a frame number
1283  *
1284  * PARAMETERS :
1285  *   @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1286  *   @timestamp   : timestamp to be set
1287  *
1288  * RETURN     : None
1289  *
1290  *==========================================================================*/
updateTimeStampInPendingBuffers(uint32_t frameNumber,nsecs_t timestamp)1291 void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1292         uint32_t frameNumber, nsecs_t timestamp)
1293 {
1294     for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1295             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1296         if (req->frame_number != frameNumber)
1297             continue;
1298 
1299         for (auto k = req->mPendingBufferList.begin();
1300                 k != req->mPendingBufferList.end(); k++ ) {
1301             struct private_handle_t *priv_handle =
1302                     (struct private_handle_t *) (*(k->buffer));
1303             setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1304         }
1305     }
1306     return;
1307 }
1308 
1309 /*===========================================================================
1310  * FUNCTION   : configureStreams
1311  *
1312  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1313  *              and output streams.
1314  *
1315  * PARAMETERS :
1316  *   @stream_list : streams to be configured
1317  *
1318  * RETURN     :
1319  *
1320  *==========================================================================*/
configureStreams(camera3_stream_configuration_t * streamList)1321 int QCamera3HardwareInterface::configureStreams(
1322         camera3_stream_configuration_t *streamList)
1323 {
1324     ATRACE_CALL();
1325     int rc = 0;
1326 
1327     // Acquire perfLock before configure streams
1328     m_perfLock.lock_acq();
1329     rc = configureStreamsPerfLocked(streamList);
1330     m_perfLock.lock_rel();
1331 
1332     return rc;
1333 }
1334 
1335 /*===========================================================================
1336  * FUNCTION   : configureStreamsPerfLocked
1337  *
1338  * DESCRIPTION: configureStreams while perfLock is held.
1339  *
1340  * PARAMETERS :
1341  *   @stream_list : streams to be configured
1342  *
1343  * RETURN     : int32_t type of status
1344  *              NO_ERROR  -- success
 *              non-zero failure code
1346  *==========================================================================*/
configureStreamsPerfLocked(camera3_stream_configuration_t * streamList)1347 int QCamera3HardwareInterface::configureStreamsPerfLocked(
1348         camera3_stream_configuration_t *streamList)
1349 {
1350     ATRACE_CALL();
1351     int rc = 0;
1352 
1353     // Sanity check stream_list
1354     if (streamList == NULL) {
1355         LOGE("NULL stream configuration");
1356         return BAD_VALUE;
1357     }
1358     if (streamList->streams == NULL) {
1359         LOGE("NULL stream list");
1360         return BAD_VALUE;
1361     }
1362 
1363     if (streamList->num_streams < 1) {
1364         LOGE("Bad number of streams requested: %d",
1365                 streamList->num_streams);
1366         return BAD_VALUE;
1367     }
1368 
1369     if (streamList->num_streams >= MAX_NUM_STREAMS) {
1370         LOGE("Maximum number of streams %d exceeded: %d",
1371                 MAX_NUM_STREAMS, streamList->num_streams);
1372         return BAD_VALUE;
1373     }
1374 
1375     mOpMode = streamList->operation_mode;
1376     LOGD("mOpMode: %d", mOpMode);
1377 
1378     /* first invalidate all the steams in the mStreamList
1379      * if they appear again, they will be validated */
1380     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1381             it != mStreamInfo.end(); it++) {
1382         QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1383         if (channel) {
1384           channel->stop();
1385         }
1386         (*it)->status = INVALID;
1387     }
1388 
1389     if (mRawDumpChannel) {
1390         mRawDumpChannel->stop();
1391         delete mRawDumpChannel;
1392         mRawDumpChannel = NULL;
1393     }
1394 
1395     if (mSupportChannel)
1396         mSupportChannel->stop();
1397 
1398     if (mAnalysisChannel) {
1399         mAnalysisChannel->stop();
1400     }
1401     if (mMetadataChannel) {
1402         /* If content of mStreamInfo is not 0, there is metadata stream */
1403         mMetadataChannel->stop();
1404     }
1405     if (mChannelHandle) {
1406         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1407                 mChannelHandle);
1408         LOGD("stopping channel %d", mChannelHandle);
1409     }
1410 
1411     pthread_mutex_lock(&mMutex);
1412 
1413     // Check state
1414     switch (mState) {
1415         case INITIALIZED:
1416         case CONFIGURED:
1417         case STARTED:
1418             /* valid state */
1419             break;
1420         default:
1421             LOGE("Invalid state %d", mState);
1422             pthread_mutex_unlock(&mMutex);
1423             return -ENODEV;
1424     }
1425 
1426     /* Check whether we have video stream */
1427     m_bIs4KVideo = false;
1428     m_bIsVideo = false;
1429     m_bEisSupportedSize = false;
1430     m_bTnrEnabled = false;
1431     bool isZsl = false;
1432     uint32_t videoWidth = 0U;
1433     uint32_t videoHeight = 0U;
1434     size_t rawStreamCnt = 0;
1435     size_t stallStreamCnt = 0;
1436     size_t processedStreamCnt = 0;
1437     // Number of streams on ISP encoder path
1438     size_t numStreamsOnEncoder = 0;
1439     size_t numYuv888OnEncoder = 0;
1440     bool bYuv888OverrideJpeg = false;
1441     cam_dimension_t largeYuv888Size = {0, 0};
1442     cam_dimension_t maxViewfinderSize = {0, 0};
1443     bool bJpegExceeds4K = false;
1444     bool bJpegOnEncoder = false;
1445     bool bUseCommonFeatureMask = false;
1446     cam_feature_mask_t commonFeatureMask = 0;
1447     bool bSmallJpegSize = false;
1448     uint32_t width_ratio;
1449     uint32_t height_ratio;
1450     maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1451     camera3_stream_t *inputStream = NULL;
1452     bool isJpeg = false;
1453     cam_dimension_t jpegSize = {0, 0};
1454 
1455     cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1456 
1457     /*EIS configuration*/
1458     bool eisSupported = false;
1459     bool oisSupported = false;
1460     int32_t margin_index = -1;
1461     uint8_t eis_prop_set;
1462     uint32_t maxEisWidth = 0;
1463     uint32_t maxEisHeight = 0;
1464 
1465     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1466 
1467     size_t count = IS_TYPE_MAX;
1468     count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1469     for (size_t i = 0; i < count; i++) {
1470         if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1471             eisSupported = true;
1472             margin_index = (int32_t)i;
1473             break;
1474         }
1475     }
1476 
1477     count = CAM_OPT_STAB_MAX;
1478     count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1479     for (size_t i = 0; i < count; i++) {
1480         if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1481             oisSupported = true;
1482             break;
1483         }
1484     }
1485 
1486     if (eisSupported) {
1487         maxEisWidth = MAX_EIS_WIDTH;
1488         maxEisHeight = MAX_EIS_HEIGHT;
1489     }
1490 
1491     /* EIS setprop control */
1492     char eis_prop[PROPERTY_VALUE_MAX];
1493     memset(eis_prop, 0, sizeof(eis_prop));
1494     property_get("persist.camera.eis.enable", eis_prop, "0");
1495     eis_prop_set = (uint8_t)atoi(eis_prop);
1496 
1497     m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1498             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1499 
1500     /* stream configurations */
1501     for (size_t i = 0; i < streamList->num_streams; i++) {
1502         camera3_stream_t *newStream = streamList->streams[i];
1503         LOGI("stream[%d] type = %d, format = %d, width = %d, "
1504                 "height = %d, rotation = %d, usage = 0x%x",
1505                  i, newStream->stream_type, newStream->format,
1506                 newStream->width, newStream->height, newStream->rotation,
1507                 newStream->usage);
1508         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1509                 newStream->stream_type == CAMERA3_STREAM_INPUT){
1510             isZsl = true;
1511         }
1512         if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1513             inputStream = newStream;
1514         }
1515 
1516         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1517             isJpeg = true;
1518             jpegSize.width = newStream->width;
1519             jpegSize.height = newStream->height;
1520             if (newStream->width > VIDEO_4K_WIDTH ||
1521                     newStream->height > VIDEO_4K_HEIGHT)
1522                 bJpegExceeds4K = true;
1523         }
1524 
1525         if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1526                 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1527             m_bIsVideo = true;
1528             videoWidth = newStream->width;
1529             videoHeight = newStream->height;
1530             if ((VIDEO_4K_WIDTH <= newStream->width) &&
1531                     (VIDEO_4K_HEIGHT <= newStream->height)) {
1532                 m_bIs4KVideo = true;
1533             }
1534             m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1535                                   (newStream->height <= maxEisHeight);
1536         }
1537         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1538                 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1539             switch (newStream->format) {
1540             case HAL_PIXEL_FORMAT_BLOB:
1541                 stallStreamCnt++;
1542                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1543                         newStream->height)) {
1544                     numStreamsOnEncoder++;
1545                     bJpegOnEncoder = true;
1546                 }
1547                 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1548                         newStream->width);
1549                 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1550                         newStream->height);;
1551                 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1552                         "FATAL: max_downscale_factor cannot be zero and so assert");
1553                 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1554                     (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1555                     LOGH("Setting small jpeg size flag to true");
1556                     bSmallJpegSize = true;
1557                 }
1558                 break;
1559             case HAL_PIXEL_FORMAT_RAW10:
1560             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1561             case HAL_PIXEL_FORMAT_RAW16:
1562                 rawStreamCnt++;
1563                 break;
1564             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1565                 processedStreamCnt++;
1566                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1567                         newStream->height)) {
1568                     if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1569                             !IS_USAGE_ZSL(newStream->usage)) {
1570                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1571                     }
1572                     numStreamsOnEncoder++;
1573                 }
1574                 break;
1575             case HAL_PIXEL_FORMAT_YCbCr_420_888:
1576                 processedStreamCnt++;
1577                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1578                         newStream->height)) {
1579                     // If Yuv888 size is not greater than 4K, set feature mask
1580                     // to SUPERSET so that it support concurrent request on
1581                     // YUV and JPEG.
1582                     if (newStream->width <= VIDEO_4K_WIDTH &&
1583                             newStream->height <= VIDEO_4K_HEIGHT) {
1584                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1585                     }
1586                     numStreamsOnEncoder++;
1587                     numYuv888OnEncoder++;
1588                     largeYuv888Size.width = newStream->width;
1589                     largeYuv888Size.height = newStream->height;
1590                 }
1591                 break;
1592             default:
1593                 processedStreamCnt++;
1594                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1595                         newStream->height)) {
1596                     commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1597                     numStreamsOnEncoder++;
1598                 }
1599                 break;
1600             }
1601 
1602         }
1603     }
1604 
1605     if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1606         !m_bIsVideo) {
1607         m_bEisEnable = false;
1608     }
1609 
1610     /* Logic to enable/disable TNR based on specific config size/etc.*/
1611     if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1612             ((videoWidth == 1920 && videoHeight == 1080) ||
1613             (videoWidth == 1280 && videoHeight == 720)) &&
1614             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1615         m_bTnrEnabled = true;
1616 
1617     /* Check if num_streams is sane */
1618     if (stallStreamCnt > MAX_STALLING_STREAMS ||
1619             rawStreamCnt > MAX_RAW_STREAMS ||
1620             processedStreamCnt > MAX_PROCESSED_STREAMS) {
1621         LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1622                  stallStreamCnt, rawStreamCnt, processedStreamCnt);
1623         pthread_mutex_unlock(&mMutex);
1624         return -EINVAL;
1625     }
1626     /* Check whether we have zsl stream or 4k video case */
1627     if (isZsl && m_bIsVideo) {
1628         LOGE("Currently invalid configuration ZSL&Video!");
1629         pthread_mutex_unlock(&mMutex);
1630         return -EINVAL;
1631     }
1632     /* Check if stream sizes are sane */
1633     if (numStreamsOnEncoder > 2) {
1634         LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1635         pthread_mutex_unlock(&mMutex);
1636         return -EINVAL;
1637     } else if (1 < numStreamsOnEncoder){
1638         bUseCommonFeatureMask = true;
1639         LOGH("Multiple streams above max viewfinder size, common mask needed");
1640     }
1641 
1642     /* Check if BLOB size is greater than 4k in 4k recording case */
1643     if (m_bIs4KVideo && bJpegExceeds4K) {
1644         LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1645         pthread_mutex_unlock(&mMutex);
1646         return -EINVAL;
1647     }
1648 
1649     // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1650     // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1651     // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1652     // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1653     // configurations:
1654     //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1655     //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1656     //    (These two configurations will not have CAC2 enabled even in HQ modes.)
1657     if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1658         ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1659                 __func__);
1660         pthread_mutex_unlock(&mMutex);
1661         return -EINVAL;
1662     }
1663 
1664     // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1665     // the YUV stream's size is greater or equal to the JPEG size, set common
1666     // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1667     if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1668             jpegSize.width, jpegSize.height) &&
1669             largeYuv888Size.width > jpegSize.width &&
1670             largeYuv888Size.height > jpegSize.height) {
1671         bYuv888OverrideJpeg = true;
1672     } else if (!isJpeg && numStreamsOnEncoder > 1) {
1673         commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1674     }
1675 
1676     LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1677             maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1678             commonFeatureMask);
1679     LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1680             numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1681 
1682     rc = validateStreamDimensions(streamList);
1683     if (rc == NO_ERROR) {
1684         rc = validateStreamRotations(streamList);
1685     }
1686     if (rc != NO_ERROR) {
1687         LOGE("Invalid stream configuration requested!");
1688         pthread_mutex_unlock(&mMutex);
1689         return rc;
1690     }
1691 
1692     camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1693     camera3_stream_t *jpegStream = NULL;
1694     for (size_t i = 0; i < streamList->num_streams; i++) {
1695         camera3_stream_t *newStream = streamList->streams[i];
1696         LOGH("newStream type = %d, stream format = %d "
1697                 "stream size : %d x %d, stream rotation = %d",
1698                  newStream->stream_type, newStream->format,
1699                 newStream->width, newStream->height, newStream->rotation);
1700         //if the stream is in the mStreamList validate it
1701         bool stream_exists = false;
1702         for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1703                 it != mStreamInfo.end(); it++) {
1704             if ((*it)->stream == newStream) {
1705                 QCamera3ProcessingChannel *channel =
1706                     (QCamera3ProcessingChannel*)(*it)->stream->priv;
1707                 stream_exists = true;
1708                 if (channel)
1709                     delete channel;
1710                 (*it)->status = VALID;
1711                 (*it)->stream->priv = NULL;
1712                 (*it)->channel = NULL;
1713             }
1714         }
1715         if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1716             //new stream
1717             stream_info_t* stream_info;
1718             stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1719             if (!stream_info) {
1720                LOGE("Could not allocate stream info");
1721                rc = -ENOMEM;
1722                pthread_mutex_unlock(&mMutex);
1723                return rc;
1724             }
1725             stream_info->stream = newStream;
1726             stream_info->status = VALID;
1727             stream_info->channel = NULL;
1728             mStreamInfo.push_back(stream_info);
1729         }
1730         /* Covers Opaque ZSL and API1 F/W ZSL */
1731         if (IS_USAGE_ZSL(newStream->usage)
1732                 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1733             if (zslStream != NULL) {
1734                 LOGE("Multiple input/reprocess streams requested!");
1735                 pthread_mutex_unlock(&mMutex);
1736                 return BAD_VALUE;
1737             }
1738             zslStream = newStream;
1739         }
1740         /* Covers YUV reprocess */
1741         if (inputStream != NULL) {
1742             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1743                     && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1744                     && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1745                     && inputStream->width == newStream->width
1746                     && inputStream->height == newStream->height) {
1747                 if (zslStream != NULL) {
1748                     /* This scenario indicates multiple YUV streams with same size
1749                      * as input stream have been requested, since zsl stream handle
1750                      * is solely use for the purpose of overriding the size of streams
1751                      * which share h/w streams we will just make a guess here as to
1752                      * which of the stream is a ZSL stream, this will be refactored
1753                      * once we make generic logic for streams sharing encoder output
1754                      */
1755                     LOGH("Warning, Multiple ip/reprocess streams requested!");
1756                 }
1757                 zslStream = newStream;
1758             }
1759         }
1760         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1761             jpegStream = newStream;
1762         }
1763     }
1764 
1765     /* If a zsl stream is set, we know that we have configured at least one input or
1766        bidirectional stream */
1767     if (NULL != zslStream) {
1768         mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1769         mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1770         mInputStreamInfo.format = zslStream->format;
1771         mInputStreamInfo.usage = zslStream->usage;
1772         LOGD("Input stream configured! %d x %d, format %d, usage %d",
1773                  mInputStreamInfo.dim.width,
1774                 mInputStreamInfo.dim.height,
1775                 mInputStreamInfo.format, mInputStreamInfo.usage);
1776     }
1777 
1778     cleanAndSortStreamInfo();
1779     if (mMetadataChannel) {
1780         delete mMetadataChannel;
1781         mMetadataChannel = NULL;
1782     }
1783     if (mSupportChannel) {
1784         delete mSupportChannel;
1785         mSupportChannel = NULL;
1786     }
1787 
1788     if (mAnalysisChannel) {
1789         delete mAnalysisChannel;
1790         mAnalysisChannel = NULL;
1791     }
1792 
1793     if (mDummyBatchChannel) {
1794         delete mDummyBatchChannel;
1795         mDummyBatchChannel = NULL;
1796     }
1797 
1798     //Create metadata channel and initialize it
1799     cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1800     setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1801             gCamCapability[mCameraId]->color_arrangement);
1802     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1803                     mChannelHandle, mCameraHandle->ops, captureResultCb,
1804                     setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
1805     if (mMetadataChannel == NULL) {
1806         LOGE("failed to allocate metadata channel");
1807         rc = -ENOMEM;
1808         pthread_mutex_unlock(&mMutex);
1809         return rc;
1810     }
1811     rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1812     if (rc < 0) {
1813         LOGE("metadata channel initialization failed");
1814         delete mMetadataChannel;
1815         mMetadataChannel = NULL;
1816         pthread_mutex_unlock(&mMutex);
1817         return rc;
1818     }
1819 
1820     // Create analysis stream all the time, even when h/w support is not available
1821     {
1822         cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1823         cam_analysis_info_t analysisInfo;
1824         rc = mCommon.getAnalysisInfo(
1825                 FALSE,
1826                 TRUE,
1827                 analysisFeatureMask,
1828                 &analysisInfo);
1829         if (rc != NO_ERROR) {
1830             LOGE("getAnalysisInfo failed, ret = %d", rc);
1831             pthread_mutex_unlock(&mMutex);
1832             return rc;
1833         }
1834 
1835         cam_color_filter_arrangement_t analysis_color_arrangement =
1836                 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
1837                 CAM_FILTER_ARRANGEMENT_Y :
1838                 gCamCapability[mCameraId]->color_arrangement);
1839         setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
1840                 analysis_color_arrangement);
1841 
1842         mAnalysisChannel = new QCamera3SupportChannel(
1843                 mCameraHandle->camera_handle,
1844                 mChannelHandle,
1845                 mCameraHandle->ops,
1846                 &analysisInfo.analysis_padding_info,
1847                 analysisFeatureMask,
1848                 CAM_STREAM_TYPE_ANALYSIS,
1849                 &analysisInfo.analysis_max_res,
1850                 (analysisInfo.analysis_format
1851                 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
1852                 : CAM_FORMAT_YUV_420_NV21),
1853                 analysisInfo.hw_analysis_supported,
1854                 this,
1855                 0); // force buffer count to 0
1856         if (!mAnalysisChannel) {
1857             LOGE("H/W Analysis channel cannot be created");
1858             pthread_mutex_unlock(&mMutex);
1859             return -ENOMEM;
1860         }
1861     }
1862 
1863     bool isRawStreamRequested = false;
1864     memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1865     /* Allocate channel objects for the requested streams */
1866     for (size_t i = 0; i < streamList->num_streams; i++) {
1867         camera3_stream_t *newStream = streamList->streams[i];
1868         uint32_t stream_usage = newStream->usage;
1869         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1870         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1871         if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1872                 || IS_USAGE_ZSL(newStream->usage)) &&
1873             newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1874             mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1875             if (bUseCommonFeatureMask) {
1876                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1877                         commonFeatureMask;
1878             } else {
1879                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1880                         CAM_QCOM_FEATURE_NONE;
1881             }
1882 
1883         } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1884                 LOGH("Input stream configured, reprocess config");
1885         } else {
1886             //for non zsl streams find out the format
1887             switch (newStream->format) {
1888             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1889             {
1890                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1891                         CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1892                 /* add additional features to pp feature mask */
1893                 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1894                         mStreamConfigInfo.num_streams);
1895 
1896                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1897                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1898                                 CAM_STREAM_TYPE_VIDEO;
1899                     if (m_bTnrEnabled && m_bTnrVideo) {
1900                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1901                             CAM_QCOM_FEATURE_CPP_TNR;
1902                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1903                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1904                                 ~CAM_QCOM_FEATURE_CDS;
1905                     }
1906                 } else {
1907                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1908                             CAM_STREAM_TYPE_PREVIEW;
1909                     if (m_bTnrEnabled && m_bTnrPreview) {
1910                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1911                                 CAM_QCOM_FEATURE_CPP_TNR;
1912                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1913                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1914                                 ~CAM_QCOM_FEATURE_CDS;
1915                     }
1916                     padding_info.width_padding = mSurfaceStridePadding;
1917                     padding_info.height_padding = CAM_PAD_TO_2;
1918                 }
1919                 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1920                         (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1921                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1922                             newStream->height;
1923                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1924                             newStream->width;
1925                 }
1926             }
1927             break;
1928             case HAL_PIXEL_FORMAT_YCbCr_420_888:
1929                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1930                 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1931                     if (bUseCommonFeatureMask)
1932                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1933                                 commonFeatureMask;
1934                     else
1935                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1936                                 CAM_QCOM_FEATURE_NONE;
1937                 } else {
1938                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1939                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1940                 }
1941             break;
1942             case HAL_PIXEL_FORMAT_BLOB:
1943                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1944                 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
1945                 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
1946                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1947                              CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1948                 } else {
1949                     if (bUseCommonFeatureMask &&
1950                             isOnEncoder(maxViewfinderSize, newStream->width,
1951                             newStream->height)) {
1952                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1953                     } else {
1954                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1955                     }
1956                 }
1957                 if (isZsl) {
1958                     if (zslStream) {
1959                         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1960                                 (int32_t)zslStream->width;
1961                         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1962                                 (int32_t)zslStream->height;
1963                     } else {
1964                         LOGE("Error, No ZSL stream identified");
1965                         pthread_mutex_unlock(&mMutex);
1966                         return -EINVAL;
1967                     }
1968                 } else if (m_bIs4KVideo) {
1969                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
1970                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
1971                 } else if (bYuv888OverrideJpeg) {
1972                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1973                             (int32_t)largeYuv888Size.width;
1974                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1975                             (int32_t)largeYuv888Size.height;
1976                 }
1977                 break;
1978             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1979             case HAL_PIXEL_FORMAT_RAW16:
1980             case HAL_PIXEL_FORMAT_RAW10:
1981                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1982                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1983                 isRawStreamRequested = true;
1984                 break;
1985             default:
1986                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1987                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1988                 break;
1989             }
1990         }
1991 
1992         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1993                 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1994                 gCamCapability[mCameraId]->color_arrangement);
1995 
1996         if (newStream->priv == NULL) {
1997             //New stream, construct channel
1998             switch (newStream->stream_type) {
1999             case CAMERA3_STREAM_INPUT:
2000                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2001                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2002                 break;
2003             case CAMERA3_STREAM_BIDIRECTIONAL:
2004                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2005                     GRALLOC_USAGE_HW_CAMERA_WRITE;
2006                 break;
2007             case CAMERA3_STREAM_OUTPUT:
2008                 /* For video encoding stream, set read/write rarely
2009                  * flag so that they may be set to un-cached */
2010                 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2011                     newStream->usage |=
2012                          (GRALLOC_USAGE_SW_READ_RARELY |
2013                          GRALLOC_USAGE_SW_WRITE_RARELY |
2014                          GRALLOC_USAGE_HW_CAMERA_WRITE);
2015                 else if (IS_USAGE_ZSL(newStream->usage))
2016                 {
2017                     LOGD("ZSL usage flag skipping");
2018                 }
2019                 else if (newStream == zslStream
2020                         || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2021                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2022                 } else
2023                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2024                 break;
2025             default:
2026                 LOGE("Invalid stream_type %d", newStream->stream_type);
2027                 break;
2028             }
2029 
2030             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2031                     newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2032                 QCamera3ProcessingChannel *channel = NULL;
2033                 switch (newStream->format) {
2034                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2035                     if ((newStream->usage &
2036                             private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2037                             (streamList->operation_mode ==
2038                             CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2039                     ) {
2040                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2041                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
2042                                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2043                                 this,
2044                                 newStream,
2045                                 (cam_stream_type_t)
2046                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2047                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2048                                 mMetadataChannel,
2049                                 0); //heap buffers are not required for HFR video channel
2050                         if (channel == NULL) {
2051                             LOGE("allocation of channel failed");
2052                             pthread_mutex_unlock(&mMutex);
2053                             return -ENOMEM;
2054                         }
2055                         //channel->getNumBuffers() will return 0 here so use
2056                         //MAX_INFLIGH_HFR_REQUESTS
2057                         newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2058                         newStream->priv = channel;
2059                         LOGI("num video buffers in HFR mode: %d",
2060                                  MAX_INFLIGHT_HFR_REQUESTS);
2061                     } else {
2062                         /* Copy stream contents in HFR preview only case to create
2063                          * dummy batch channel so that sensor streaming is in
2064                          * HFR mode */
2065                         if (!m_bIsVideo && (streamList->operation_mode ==
2066                                 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2067                             mDummyBatchStream = *newStream;
2068                         }
2069                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2070                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
2071                                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2072                                 this,
2073                                 newStream,
2074                                 (cam_stream_type_t)
2075                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2076                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2077                                 mMetadataChannel,
2078                                 MAX_INFLIGHT_REQUESTS);
2079                         if (channel == NULL) {
2080                             LOGE("allocation of channel failed");
2081                             pthread_mutex_unlock(&mMutex);
2082                             return -ENOMEM;
2083                         }
2084                         newStream->max_buffers = MAX_INFLIGHT_60FPS_REQUESTS;
2085                         newStream->priv = channel;
2086                     }
2087                     break;
2088                 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2089                     channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2090                             mChannelHandle,
2091                             mCameraHandle->ops, captureResultCb,
2092                             setBufferErrorStatus, &padding_info,
2093                             this,
2094                             newStream,
2095                             (cam_stream_type_t)
2096                                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2097                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2098                             mMetadataChannel);
2099                     if (channel == NULL) {
2100                         LOGE("allocation of YUV channel failed");
2101                         pthread_mutex_unlock(&mMutex);
2102                         return -ENOMEM;
2103                     }
2104                     newStream->max_buffers = channel->getNumBuffers();
2105                     newStream->priv = channel;
2106                     break;
2107                 }
2108                 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2109                 case HAL_PIXEL_FORMAT_RAW16:
2110                 case HAL_PIXEL_FORMAT_RAW10:
2111                     mRawChannel = new QCamera3RawChannel(
2112                             mCameraHandle->camera_handle, mChannelHandle,
2113                             mCameraHandle->ops, captureResultCb,
2114                             setBufferErrorStatus, &padding_info,
2115                             this, newStream,
2116                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2117                             mMetadataChannel,
2118                             (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2119                     if (mRawChannel == NULL) {
2120                         LOGE("allocation of raw channel failed");
2121                         pthread_mutex_unlock(&mMutex);
2122                         return -ENOMEM;
2123                     }
2124                     newStream->max_buffers = mRawChannel->getNumBuffers();
2125                     newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2126                     break;
2127                 case HAL_PIXEL_FORMAT_BLOB:
2128                     // Max live snapshot inflight buffer is 1. This is to mitigate
2129                     // frame drop issues for video snapshot. The more buffers being
2130                     // allocated, the more frame drops there are.
2131                     mPictureChannel = new QCamera3PicChannel(
2132                             mCameraHandle->camera_handle, mChannelHandle,
2133                             mCameraHandle->ops, captureResultCb,
2134                             setBufferErrorStatus, &padding_info, this, newStream,
2135                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2136                             m_bIs4KVideo, isZsl, mMetadataChannel,
2137                             (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2138                     if (mPictureChannel == NULL) {
2139                         LOGE("allocation of channel failed");
2140                         pthread_mutex_unlock(&mMutex);
2141                         return -ENOMEM;
2142                     }
2143                     newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2144                     newStream->max_buffers = mPictureChannel->getNumBuffers();
2145                     mPictureChannel->overrideYuvSize(
2146                             mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2147                             mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2148                     break;
2149 
2150                 default:
2151                     LOGE("not a supported format 0x%x", newStream->format);
2152                     break;
2153                 }
2154             } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2155                 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2156             } else {
2157                 LOGE("Error, Unknown stream type");
2158                 pthread_mutex_unlock(&mMutex);
2159                 return -EINVAL;
2160             }
2161 
2162             QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2163             if (channel != NULL && channel->isUBWCEnabled()) {
2164                 cam_format_t fmt = channel->getStreamDefaultFormat(
2165                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2166                         newStream->width, newStream->height);
2167                 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2168                     newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2169                 }
2170             }
2171 
2172             for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2173                     it != mStreamInfo.end(); it++) {
2174                 if ((*it)->stream == newStream) {
2175                     (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2176                     break;
2177                 }
2178             }
2179         } else {
2180             // Channel already exists for this stream
2181             // Do nothing for now
2182         }
2183         padding_info = gCamCapability[mCameraId]->padding_info;
2184 
2185         /* Do not add entries for input stream in metastream info
2186          * since there is no real stream associated with it
2187          */
2188         if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2189             mStreamConfigInfo.num_streams++;
2190     }
2191 
2192     //RAW DUMP channel
2193     if (mEnableRawDump && isRawStreamRequested == false){
2194         cam_dimension_t rawDumpSize;
2195         rawDumpSize = getMaxRawSize(mCameraId);
2196         cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2197         setPAAFSupport(rawDumpFeatureMask,
2198                 CAM_STREAM_TYPE_RAW,
2199                 gCamCapability[mCameraId]->color_arrangement);
2200         mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2201                                   mChannelHandle,
2202                                   mCameraHandle->ops,
2203                                   rawDumpSize,
2204                                   &padding_info,
2205                                   this, rawDumpFeatureMask);
2206         if (!mRawDumpChannel) {
2207             LOGE("Raw Dump channel cannot be created");
2208             pthread_mutex_unlock(&mMutex);
2209             return -ENOMEM;
2210         }
2211     }
2212 
2213 
2214     if (mAnalysisChannel) {
2215         cam_analysis_info_t analysisInfo;
2216         memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2217         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2218                 CAM_STREAM_TYPE_ANALYSIS;
2219         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2220                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2221         rc = mCommon.getAnalysisInfo(FALSE, TRUE,
2222                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2223                 &analysisInfo);
2224         if (rc != NO_ERROR) {
2225             LOGE("getAnalysisInfo failed, ret = %d", rc);
2226             pthread_mutex_unlock(&mMutex);
2227             return rc;
2228         }
2229         cam_color_filter_arrangement_t analysis_color_arrangement =
2230                 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2231                 CAM_FILTER_ARRANGEMENT_Y :
2232                 gCamCapability[mCameraId]->color_arrangement);
2233         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2234                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2235                 analysis_color_arrangement);
2236 
2237         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2238                 analysisInfo.analysis_max_res;
2239         mStreamConfigInfo.num_streams++;
2240     }
2241 
2242     if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2243         cam_analysis_info_t supportInfo;
2244         memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2245         cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2246         setPAAFSupport(callbackFeatureMask,
2247                 CAM_STREAM_TYPE_CALLBACK,
2248                 gCamCapability[mCameraId]->color_arrangement);
2249         rc = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
2250         if (rc != NO_ERROR) {
2251             LOGE("getAnalysisInfo failed, ret = %d", rc);
2252             pthread_mutex_unlock(&mMutex);
2253             return rc;
2254         }
2255         mSupportChannel = new QCamera3SupportChannel(
2256                 mCameraHandle->camera_handle,
2257                 mChannelHandle,
2258                 mCameraHandle->ops,
2259                 &gCamCapability[mCameraId]->padding_info,
2260                 callbackFeatureMask,
2261                 CAM_STREAM_TYPE_CALLBACK,
2262                 &QCamera3SupportChannel::kDim,
2263                 CAM_FORMAT_YUV_420_NV21,
2264                 supportInfo.hw_analysis_supported,
2265                 this, 0);
2266         if (!mSupportChannel) {
2267             LOGE("dummy channel cannot be created");
2268             pthread_mutex_unlock(&mMutex);
2269             return -ENOMEM;
2270         }
2271     }
2272 
2273     if (mSupportChannel) {
2274         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2275                 QCamera3SupportChannel::kDim;
2276         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2277                 CAM_STREAM_TYPE_CALLBACK;
2278         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2279                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2280         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2281                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2282                 gCamCapability[mCameraId]->color_arrangement);
2283         mStreamConfigInfo.num_streams++;
2284     }
2285 
2286     if (mRawDumpChannel) {
2287         cam_dimension_t rawSize;
2288         rawSize = getMaxRawSize(mCameraId);
2289         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2290                 rawSize;
2291         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2292                 CAM_STREAM_TYPE_RAW;
2293         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2294                 CAM_QCOM_FEATURE_NONE;
2295         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2296                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2297                 gCamCapability[mCameraId]->color_arrangement);
2298         mStreamConfigInfo.num_streams++;
2299     }
2300     /* In HFR mode, if video stream is not added, create a dummy channel so that
2301      * ISP can create a batch mode even for preview only case. This channel is
2302      * never 'start'ed (no stream-on), it is only 'initialized'  */
2303     if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2304             !m_bIsVideo) {
2305         cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2306         setPAAFSupport(dummyFeatureMask,
2307                 CAM_STREAM_TYPE_VIDEO,
2308                 gCamCapability[mCameraId]->color_arrangement);
2309         mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2310                 mChannelHandle,
2311                 mCameraHandle->ops, captureResultCb,
2312                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2313                 this,
2314                 &mDummyBatchStream,
2315                 CAM_STREAM_TYPE_VIDEO,
2316                 dummyFeatureMask,
2317                 mMetadataChannel);
2318         if (NULL == mDummyBatchChannel) {
2319             LOGE("creation of mDummyBatchChannel failed."
2320                     "Preview will use non-hfr sensor mode ");
2321         }
2322     }
2323     if (mDummyBatchChannel) {
2324         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2325                 mDummyBatchStream.width;
2326         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2327                 mDummyBatchStream.height;
2328         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2329                 CAM_STREAM_TYPE_VIDEO;
2330         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2331                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2332         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2333                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2334                 gCamCapability[mCameraId]->color_arrangement);
2335         mStreamConfigInfo.num_streams++;
2336     }
2337 
2338     mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2339     mStreamConfigInfo.buffer_info.max_buffers =
2340             m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2341 
2342     /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2343     for (pendingRequestIterator i = mPendingRequestsList.begin();
2344             i != mPendingRequestsList.end();) {
2345         i = erasePendingRequest(i);
2346     }
2347     mPendingFrameDropList.clear();
2348     // Initialize/Reset the pending buffers list
2349     for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2350         req.mPendingBufferList.clear();
2351     }
2352     mPendingBuffersMap.mPendingBuffersInRequest.clear();
2353 
2354     mPendingReprocessResultList.clear();
2355 
2356     mCurJpegMeta.clear();
2357     //Get min frame duration for this streams configuration
2358     deriveMinFrameDuration();
2359 
2360     // Update state
2361     mState = CONFIGURED;
2362 
2363     pthread_mutex_unlock(&mMutex);
2364 
2365     return rc;
2366 }
2367 
2368 /*===========================================================================
2369  * FUNCTION   : validateCaptureRequest
2370  *
2371  * DESCRIPTION: validate a capture request from camera service
2372  *
2373  * PARAMETERS :
2374  *   @request : request from framework to process
2375  *
2376  * RETURN     :
2377  *
2378  *==========================================================================*/
validateCaptureRequest(camera3_capture_request_t * request)2379 int QCamera3HardwareInterface::validateCaptureRequest(
2380                     camera3_capture_request_t *request)
2381 {
2382     ssize_t idx = 0;
2383     const camera3_stream_buffer_t *b;
2384     CameraMetadata meta;
2385 
2386     /* Sanity check the request */
2387     if (request == NULL) {
2388         LOGE("NULL capture request");
2389         return BAD_VALUE;
2390     }
2391 
2392     if ((request->settings == NULL) && (mState == CONFIGURED)) {
2393         /*settings cannot be null for the first request*/
2394         return BAD_VALUE;
2395     }
2396 
2397     uint32_t frameNumber = request->frame_number;
2398     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2399         LOGE("Request %d: No output buffers provided!",
2400                 __FUNCTION__, frameNumber);
2401         return BAD_VALUE;
2402     }
2403     if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2404         LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2405                  request->num_output_buffers, MAX_NUM_STREAMS);
2406         return BAD_VALUE;
2407     }
2408     if (request->input_buffer != NULL) {
2409         b = request->input_buffer;
2410         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2411             LOGE("Request %d: Buffer %ld: Status not OK!",
2412                      frameNumber, (long)idx);
2413             return BAD_VALUE;
2414         }
2415         if (b->release_fence != -1) {
2416             LOGE("Request %d: Buffer %ld: Has a release fence!",
2417                      frameNumber, (long)idx);
2418             return BAD_VALUE;
2419         }
2420         if (b->buffer == NULL) {
2421             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2422                      frameNumber, (long)idx);
2423             return BAD_VALUE;
2424         }
2425     }
2426 
2427     // Validate all buffers
2428     b = request->output_buffers;
2429     do {
2430         QCamera3ProcessingChannel *channel =
2431                 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2432         if (channel == NULL) {
2433             LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2434                      frameNumber, (long)idx);
2435             return BAD_VALUE;
2436         }
2437         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2438             LOGE("Request %d: Buffer %ld: Status not OK!",
2439                      frameNumber, (long)idx);
2440             return BAD_VALUE;
2441         }
2442         if (b->release_fence != -1) {
2443             LOGE("Request %d: Buffer %ld: Has a release fence!",
2444                      frameNumber, (long)idx);
2445             return BAD_VALUE;
2446         }
2447         if (b->buffer == NULL) {
2448             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2449                      frameNumber, (long)idx);
2450             return BAD_VALUE;
2451         }
2452         if (*(b->buffer) == NULL) {
2453             LOGE("Request %d: Buffer %ld: NULL private handle!",
2454                      frameNumber, (long)idx);
2455             return BAD_VALUE;
2456         }
2457         idx++;
2458         b = request->output_buffers + idx;
2459     } while (idx < (ssize_t)request->num_output_buffers);
2460 
2461     return NO_ERROR;
2462 }
2463 
2464 /*===========================================================================
2465  * FUNCTION   : deriveMinFrameDuration
2466  *
2467  * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
2468  *              on currently configured streams.
2469  *
2470  * PARAMETERS : NONE
2471  *
2472  * RETURN     : NONE
2473  *
2474  *==========================================================================*/
deriveMinFrameDuration()2475 void QCamera3HardwareInterface::deriveMinFrameDuration()
2476 {
2477     int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2478 
2479     maxJpegDim = 0;
2480     maxProcessedDim = 0;
2481     maxRawDim = 0;
2482 
2483     // Figure out maximum jpeg, processed, and raw dimensions
2484     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2485         it != mStreamInfo.end(); it++) {
2486 
2487         // Input stream doesn't have valid stream_type
2488         if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2489             continue;
2490 
2491         int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2492         if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2493             if (dimension > maxJpegDim)
2494                 maxJpegDim = dimension;
2495         } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2496                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2497                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2498             if (dimension > maxRawDim)
2499                 maxRawDim = dimension;
2500         } else {
2501             if (dimension > maxProcessedDim)
2502                 maxProcessedDim = dimension;
2503         }
2504     }
2505 
2506     size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2507             MAX_SIZES_CNT);
2508 
2509     //Assume all jpeg dimensions are in processed dimensions.
2510     if (maxJpegDim > maxProcessedDim)
2511         maxProcessedDim = maxJpegDim;
2512     //Find the smallest raw dimension that is greater or equal to jpeg dimension
2513     if (maxProcessedDim > maxRawDim) {
2514         maxRawDim = INT32_MAX;
2515 
2516         for (size_t i = 0; i < count; i++) {
2517             int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2518                     gCamCapability[mCameraId]->raw_dim[i].height;
2519             if (dimension >= maxProcessedDim && dimension < maxRawDim)
2520                 maxRawDim = dimension;
2521         }
2522     }
2523 
2524     //Find minimum durations for processed, jpeg, and raw
2525     for (size_t i = 0; i < count; i++) {
2526         if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2527                 gCamCapability[mCameraId]->raw_dim[i].height) {
2528             mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2529             break;
2530         }
2531     }
2532     count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2533     for (size_t i = 0; i < count; i++) {
2534         if (maxProcessedDim ==
2535                 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2536                 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2537             mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2538             mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2539             break;
2540         }
2541     }
2542 }
2543 
2544 /*===========================================================================
2545  * FUNCTION   : getMinFrameDuration
2546  *
2547  * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
2548  *              and current request configuration.
2549  *
2550  * PARAMETERS : @request: requset sent by the frameworks
2551  *
2552  * RETURN     : min farme duration for a particular request
2553  *
2554  *==========================================================================*/
getMinFrameDuration(const camera3_capture_request_t * request)2555 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2556 {
2557     bool hasJpegStream = false;
2558     bool hasRawStream = false;
2559     for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2560         const camera3_stream_t *stream = request->output_buffers[i].stream;
2561         if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2562             hasJpegStream = true;
2563         else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2564                 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2565                 stream->format == HAL_PIXEL_FORMAT_RAW16)
2566             hasRawStream = true;
2567     }
2568 
2569     if (!hasJpegStream)
2570         return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2571     else
2572         return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2573 }
2574 
2575 /*===========================================================================
2576  * FUNCTION   : handleBuffersDuringFlushLock
2577  *
2578  * DESCRIPTION: Account for buffers returned from back-end during flush
2579  *              This function is executed while mMutex is held by the caller.
2580  *
2581  * PARAMETERS :
2582  *   @buffer: image buffer for the callback
2583  *
2584  * RETURN     :
2585  *==========================================================================*/
handleBuffersDuringFlushLock(camera3_stream_buffer_t * buffer)2586 void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2587 {
2588     bool buffer_found = false;
2589     for (List<PendingBuffersInRequest>::iterator req =
2590             mPendingBuffersMap.mPendingBuffersInRequest.begin();
2591             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2592         for (List<PendingBufferInfo>::iterator i =
2593                 req->mPendingBufferList.begin();
2594                 i != req->mPendingBufferList.end(); i++) {
2595             if (i->buffer == buffer->buffer) {
2596                 mPendingBuffersMap.numPendingBufsAtFlush--;
2597                 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2598                     buffer->buffer, req->frame_number,
2599                     mPendingBuffersMap.numPendingBufsAtFlush);
2600                 buffer_found = true;
2601                 break;
2602             }
2603         }
2604         if (buffer_found) {
2605             break;
2606         }
2607     }
2608     if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2609         //signal the flush()
2610         LOGD("All buffers returned to HAL. Continue flush");
2611         pthread_cond_signal(&mBuffersCond);
2612     }
2613 }
2614 
2615 
2616 /*===========================================================================
2617  * FUNCTION   : handlePendingReprocResults
2618  *
2619  * DESCRIPTION: check and notify on any pending reprocess results
2620  *
2621  * PARAMETERS :
2622  *   @frame_number   : Pending request frame number
2623  *
2624  * RETURN     : int32_t type of status
2625  *              NO_ERROR  -- success
2626  *              none-zero failure code
2627  *==========================================================================*/
handlePendingReprocResults(uint32_t frame_number)2628 int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
2629 {
2630     for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
2631             j != mPendingReprocessResultList.end(); j++) {
2632         if (j->frame_number == frame_number) {
2633             mCallbackOps->notify(mCallbackOps, &j->notify_msg);
2634 
2635             LOGD("Delayed reprocess notify %d",
2636                     frame_number);
2637 
2638             for (pendingRequestIterator k = mPendingRequestsList.begin();
2639                     k != mPendingRequestsList.end(); k++) {
2640 
2641                 if (k->frame_number == j->frame_number) {
2642                     LOGD("Found reprocess frame number %d in pending reprocess List "
2643                             "Take it out!!",
2644                             k->frame_number);
2645 
2646                     camera3_capture_result result;
2647                     memset(&result, 0, sizeof(camera3_capture_result));
2648                     result.frame_number = frame_number;
2649                     result.num_output_buffers = 1;
2650                     result.output_buffers =  &j->buffer;
2651                     result.input_buffer = k->input_buffer;
2652                     result.result = k->settings;
2653                     result.partial_result = PARTIAL_RESULT_COUNT;
2654                     mCallbackOps->process_capture_result(mCallbackOps, &result);
2655 
2656                     erasePendingRequest(k);
2657                     break;
2658                 }
2659             }
2660             mPendingReprocessResultList.erase(j);
2661             break;
2662         }
2663     }
2664     return NO_ERROR;
2665 }
2666 
2667 /*===========================================================================
2668  * FUNCTION   : handleBatchMetadata
2669  *
2670  * DESCRIPTION: Handles metadata buffer callback in batch mode
2671  *
2672  * PARAMETERS : @metadata_buf: metadata buffer
2673  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2674  *                 the meta buf in this method
2675  *
2676  * RETURN     :
2677  *
2678  *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metdata will contain the frame number and timestamp of
     * the last frame in the batch. Eg: a batch containing buffers from request
     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
     * multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Extract the batch's frame-number/timestamp entries; any NULL pointer
    // means the vendor metadata is malformed and only the per-iteration
    // handleMetadataWithLock calls below are still useful.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the batch's last urgent frame number back to the first one via
        // mPendingBatchMap; the span gives the number of urgent results owed.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGH("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        // Same mapping for the regular frame numbers; the batch-map entry is
        // removed here because this is the final metadata for the batch.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        mPendingBatchMap.removeItem(last_frame_number);

        LOGH("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // One loop iteration per frame in the batch; the larger of the two
        // spans wins so that neither result stream is short-changed.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                     urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                     frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    // Overwrite the metadata in place so this iteration's
                    // handleMetadataWithLock sees the interpolated value.
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                             urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                //Infer timestamp
                // Timestamps are spaced evenly at the HFR frame interval,
                // counting back from the last frame's capture time.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGH("batch capture_time: %lld, capture_time: %lld",
                         last_frame_capture_time, capture_time);
            }
        }
        // false: the shared metadata buffer must survive all iterations;
        // buf-done/free is handled once at the end of this function.
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2843 
notifyError(uint32_t frameNumber,camera3_error_msg_code_t errorCode)2844 void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2845         camera3_error_msg_code_t errorCode)
2846 {
2847     camera3_notify_msg_t notify_msg;
2848     memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2849     notify_msg.type = CAMERA3_MSG_ERROR;
2850     notify_msg.message.error.error_code = errorCode;
2851     notify_msg.message.error.error_stream = NULL;
2852     notify_msg.message.error.frame_number = frameNumber;
2853     mCallbackOps->notify(mCallbackOps, &notify_msg);
2854 
2855     return;
2856 }
2857 /*===========================================================================
2858  * FUNCTION   : handleMetadataWithLock
2859  *
2860  * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2861  *
2862  * PARAMETERS : @metadata_buf: metadata buffer
2863  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2864  *                 the meta buf in this method
2865  *              @firstMetadataInBatch: Boolean to indicate whether this is the
2866  *                  first metadata in a batch. Valid only for batch mode
2867  *
2868  * RETURN     :
2869  *
2870  *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
    bool firstMetadataInBatch)
{
    ATRACE_CALL();
    if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
        //during flush do not send metadata from this thread
        LOGD("not sending metadata during flush or when mState is error");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        return;
    }

    //not in flush
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid, urgent_frame_number_valid;
    uint32_t frame_number, urgent_frame_number;
    int64_t capture_time;
    nsecs_t currentSysTime;

    // Extract frame-number/timestamp fields from the HAL metadata blob.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    // p_cam_frame_drop is declared by IF_META_AVAILABLE and reused further
    // below; it is set only when the frame-drop entry is present.
    IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
            metadata) {
        LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
                 *p_frame_number_valid, *p_frame_number);
    }

    // Without the mandatory fields nothing can be matched to a pending
    // request; release the metadata buffer (if we own it) and bail out.
    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    frame_number_valid =        *p_frame_number_valid;
    frame_number =              *p_frame_number;
    capture_time =              *p_capture_time;
    urgent_frame_number_valid = *p_urgent_frame_number_valid;
    urgent_frame_number =       *p_urgent_frame_number;
    currentSysTime =            systemTime(CLOCK_MONOTONIC);

    // Detect if buffers from any requests are overdue
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        if ( (currentSysTime - req.timestamp) >
            s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
            for (auto &missed : req.mPendingBufferList) {
                assert(missed.stream->priv);
                if (missed.stream->priv) {
                    QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
                    assert(ch->mStreams[0]);
                    if (ch->mStreams[0]) {
                        LOGW("Missing: frame = %d, buffer = %p,"
                            "stream type = %d, stream format = %d",
                            req.frame_number, missed.buffer,
                            ch->mStreams[0]->getMyType(), missed.stream->format);
                        // Let the owning channel deal with the overdue frame.
                        ch->timeoutFrame(req.frame_number);
                    }
                }
            }
        }
    }
    //Partial result on process_capture_result for timestamp
    if (urgent_frame_number_valid) {
        LOGD("valid urgent frame_number = %u, capture_time = %lld",
           urgent_frame_number, capture_time);

        //Received an urgent Frame Number, handle it
        //using partial results
        for (pendingRequestIterator i =
                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            LOGD("Iterator Frame = %d urgent frame = %d",
                 i->frame_number, urgent_frame_number);

            // An older live request with no partial result yet means its
            // urgent metadata was never delivered.
            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
                (i->partial_result_cnt == 0)) {
                LOGE("Error: HAL missed urgent metadata for frame number %d",
                         i->frame_number);
            }

            if (i->frame_number == urgent_frame_number &&
                     i->bUrgentReceived == 0) {

                camera3_capture_result_t result;
                memset(&result, 0, sizeof(camera3_capture_result_t));

                i->partial_result_cnt++;
                i->bUrgentReceived = 1;
                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                result.partial_result = i->partial_result_cnt;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("urgent frame_number = %u, capture_time = %lld",
                      result.frame_number, capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    // A metadata buffer without a valid frame number only marks start of
    // frame; nothing to deliver to the framework.
    if (!frame_number_valid) {
        LOGD("Not a valid normal frame number, used as SOF only");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    LOGH("valid frame_number = %u, capture_time = %lld",
            frame_number, capture_time);

    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        // Flush out all entries with less or equal frame numbers.

        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));

        LOGD("frame_number in the list is %u", i->frame_number);
        i->partial_result_cnt++;
        result.partial_result = i->partial_result_cnt;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then send the ERROR_BUFFER for the corresponding stream
        // The API does not expect a blob buffer to be dropped
        if (p_cam_frame_drop) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
                    if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                        LOGE("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
                                __func__, i->frame_number, streamID, j->stream->format);
                        notify_msg.type = CAMERA3_MSG_ERROR;
                        notify_msg.message.error.frame_number = i->frame_number;
                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                        notify_msg.message.error.error_stream = j->stream;
                        mCallbackOps->notify(mCallbackOps, &notify_msg);
                        LOGE("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
                                __func__, i->frame_number, streamID, j->stream->format);
                        // Remember the drop so the buffer is marked
                        // STATUS_ERROR when it is eventually returned.
                        PendingFrameDropInfo PendingFrameDrop;
                        PendingFrameDrop.frame_number=i->frame_number;
                        PendingFrameDrop.stream_ID = streamID;
                        // Add the Frame drop info to mPendingFrameDropList
                        mPendingFrameDropList.push_back(PendingFrameDrop);
                   }
               }
            }
        }

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        /* we could hit this case when we either
         * 1. have a pending reprocess request or
         * 2. miss a metadata buffer callback */
        if (i->frame_number < frame_number) {
            if (i->input_buffer) {
                /* this will be handled in handleInputBufferWithLock */
                i++;
                continue;
            } else if (mBatchSize) {

                mPendingLiveRequest--;

                // In batch mode a skipped frame gets an empty (dummy) result
                // plus an ERROR_RESULT notification.
                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
                result.result = dummyMetadata.release();

                notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
            } else {
                // Non-batch mode: a missed metadata callback is unrecoverable.
                LOGE("Fatal: Missing metadata buffer for frame number %d", i->frame_number);
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
                mState = ERROR;
                goto done_metadata;
            }
        } else {
            mPendingLiveRequest--;
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            i->timestamp = capture_time;

            /* Set the timestamp in display metadata so that clients aware of
               private_handle such as VT can use this un-modified timestamps.
               Camera framework is unaware of this timestamp and cannot change this */
            updateTimeStampInPendingBuffers(i->frame_number, i->timestamp);

            // Find channel requiring metadata, meaning internal offline postprocess
            // is needed.
            //TODO: for now, we don't support two streams requiring metadata at the same time.
            // (because we are not making copies, and metadata buffer is not reference counted.
            bool internalPproc = false;
            for (pendingBufferIterator iter = i->buffers.begin();
                    iter != i->buffers.end(); iter++) {
                if (iter->need_metadata) {
                    internalPproc = true;
                    QCamera3ProcessingChannel *channel =
                            (QCamera3ProcessingChannel *)iter->stream->priv;
                    channel->queueReprocMetadata(metadata_buf);
                    break;
                }
            }

            // atrace_begin(ATRACE_TAG_ALWAYS, "translateFromHalMetadata");
            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
                    i->capture_intent, i->hybrid_ae_enable,
                     /* DevCamDebug metadata translateFromHalMetadata function call*/
                    i->DevCamDebug_meta_enable,
                    /* DevCamDebug metadata end */
                    internalPproc, i->fwkCacMode,
                    firstMetadataInBatch);
            // atrace_end(ATRACE_TAG_ALWAYS);

            saveExifParams(metadata);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }
            }

            // When no channel consumed the metadata for offline postprocess,
            // we still own the buffer and must return it here.
            if (!internalPproc) {
                LOGD("couldn't find need_metadata for this metadata");
                // Return metadata buffer
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            }
        }
        if (!result.result) {
            LOGE("metadata is NULL");
        }
        result.frame_number = i->frame_number;
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count output buffers already cached against this request.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        updateFpsInPreviewBuffer(metadata, i->frame_number);

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            // NOTE(review): this NULL check only helps under -fno-exceptions;
            // with exceptions enabled operator new would throw instead.
            if (result_buffers != NULL) {
                size_t result_buffers_idx = 0;
                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                        j != i->buffers.end(); j++) {
                    if (j->buffer) {
                        // Mark the buffer STATUS_ERROR if this (stream, frame)
                        // pair was recorded as dropped, consuming the record.
                        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                                m != mPendingFrameDropList.end(); m++) {
                            QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                            if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
                                j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                                LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
                                        frame_number, streamID);
                                m = mPendingFrameDropList.erase(m);
                                break;
                            }
                        }
                        j->buffer->status |= mPendingBuffersMap.getBufErrStatus(j->buffer->buffer);
                        mPendingBuffersMap.removeBuf(j->buffer->buffer);
                        result_buffers[result_buffers_idx++] = *(j->buffer);
                        free(j->buffer);
                        j->buffer = NULL;
                    }
                }

                result.output_buffers = result_buffers;
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("meta frame_number = %u, capture_time = %lld",
                        result.frame_number, i->timestamp);
                free_camera_metadata((camera_metadata_t *)result.result);
                delete[] result_buffers;
            }else {
                LOGE("Fatal error: out of memory");
            }
        } else {
            // No buffers cached yet: send a metadata-only result.
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            LOGD("meta frame_number = %u, capture_time = %lld",
                    result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }

        i = erasePendingRequest(i);

        if (!mPendingReprocessResultList.empty()) {
            handlePendingReprocResults(frame_number + 1);
        }
    }

done_metadata:
    // Every request still pending has aged by one metadata callback.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
    unblockRequestIfNecessary();
}
3219 
3220 /*===========================================================================
3221  * FUNCTION   : hdrPlusPerfLock
3222  *
3223  * DESCRIPTION: perf lock for HDR+ using custom intent
3224  *
3225  * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3226  *
3227  * RETURN     : None
3228  *
3229  *==========================================================================*/
hdrPlusPerfLock(mm_camera_super_buf_t * metadata_buf)3230 void QCamera3HardwareInterface::hdrPlusPerfLock(
3231         mm_camera_super_buf_t *metadata_buf)
3232 {
3233     if (NULL == metadata_buf) {
3234         LOGE("metadata_buf is NULL");
3235         return;
3236     }
3237     metadata_buffer_t *metadata =
3238             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3239     int32_t *p_frame_number_valid =
3240             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3241     uint32_t *p_frame_number =
3242             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3243 
3244     if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3245         LOGE("%s: Invalid metadata", __func__);
3246         return;
3247     }
3248 
3249     //acquire perf lock for 5 sec after the last HDR frame is captured
3250     if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3251         if ((p_frame_number != NULL) &&
3252                 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
3253             m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
3254         }
3255     }
3256 
3257     //release lock after perf lock timer is expired. If lock is already released,
3258     //isTimerReset returns false
3259     if (m_perfLock.isTimerReset()) {
3260         mLastCustIntentFrmNum = -1;
3261         m_perfLock.lock_rel_timed();
3262     }
3263 }
3264 
3265 /*===========================================================================
3266  * FUNCTION   : handleInputBufferWithLock
3267  *
3268  * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3269  *
3270  * PARAMETERS : @frame_number: frame number of the input buffer
3271  *
3272  * RETURN     :
3273  *
3274  *==========================================================================*/
handleInputBufferWithLock(uint32_t frame_number)3275 void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3276 {
3277     ATRACE_CALL();
3278     pendingRequestIterator i = mPendingRequestsList.begin();
3279     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3280         i++;
3281     }
3282     if (i != mPendingRequestsList.end() && i->input_buffer) {
3283         //found the right request
3284         if (!i->shutter_notified) {
3285             CameraMetadata settings;
3286             camera3_notify_msg_t notify_msg;
3287             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3288             nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3289             if(i->settings) {
3290                 settings = i->settings;
3291                 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3292                     capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3293                 } else {
3294                     LOGE("No timestamp in input settings! Using current one.");
3295                 }
3296             } else {
3297                 LOGE("Input settings missing!");
3298             }
3299 
3300             notify_msg.type = CAMERA3_MSG_SHUTTER;
3301             notify_msg.message.shutter.frame_number = frame_number;
3302             notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3303             mCallbackOps->notify(mCallbackOps, &notify_msg);
3304             i->shutter_notified = true;
3305             LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3306                         i->frame_number, notify_msg.message.shutter.timestamp);
3307         }
3308 
3309         if (i->input_buffer->release_fence != -1) {
3310            int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3311            close(i->input_buffer->release_fence);
3312            if (rc != OK) {
3313                LOGE("input buffer sync wait failed %d", rc);
3314            }
3315         }
3316 
3317         camera3_capture_result result;
3318         memset(&result, 0, sizeof(camera3_capture_result));
3319         result.frame_number = frame_number;
3320         result.result = i->settings;
3321         result.input_buffer = i->input_buffer;
3322         result.partial_result = PARTIAL_RESULT_COUNT;
3323 
3324         mCallbackOps->process_capture_result(mCallbackOps, &result);
3325         LOGD("Input request metadata and input buffer frame_number = %u",
3326                         i->frame_number);
3327         i = erasePendingRequest(i);
3328     } else {
3329         LOGE("Could not find input request for frame number %d", frame_number);
3330     }
3331 }
3332 
3333 /*===========================================================================
3334  * FUNCTION   : handleBufferWithLock
3335  *
3336  * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3337  *
3338  * PARAMETERS : @buffer: image buffer for the callback
3339  *              @frame_number: frame number of the image buffer
3340  *
3341  * RETURN     :
3342  *
3343  *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CALL();
    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Request already left the pending list; send a buffers-only result
        // (result == NULL, partial_result == 0) straight to the framework.
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                LOGW("Error: pending live frame number %d is smaller than %d",
                         j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this (stream, frame) pair was recorded as dropped, flag the
        // buffer with STATUS_ERROR and consume the drop record.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                         frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        // Fold in any error status tracked for this buffer handle.
        buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
        result.output_buffers = buffer;
        LOGH("result frame_number = %d, buffer = %p",
                 frame_number, buffer->buffer);

        mPendingBuffersMap.removeBuf(buffer->buffer);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: build the shutter notification, preferring
            // the sensor timestamp from the input settings when present.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGW("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            // Wait on (and close) the input buffer's release fence before
            // handing the output back.
            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
                   LOGE("input buffer sync wait failed %d", rc);
               }
            }
            buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
            mPendingBuffersMap.removeBuf(buffer->buffer);

            // Only deliver now if no older request is still pending;
            // otherwise results would go out of frame-number order.
            bool notifyNow = true;
            for (pendingRequestIterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    notifyNow = false;
                    break;
                }
            }

            if (notifyNow) {
                camera3_capture_result result;
                memset(&result, 0, sizeof(camera3_capture_result));
                result.frame_number = frame_number;
                result.result = i->settings;
                result.input_buffer = i->input_buffer;
                result.num_output_buffers = 1;
                result.output_buffers = buffer;
                result.partial_result = PARTIAL_RESULT_COUNT;

                mCallbackOps->notify(mCallbackOps, &notify_msg);
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("Notify reprocess now %d!", frame_number);
                i = erasePendingRequest(i);
            } else {
                // Cache reprocess result for later
                PendingReprocessResult pendingResult;
                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
                pendingResult.notify_msg = notify_msg;
                pendingResult.buffer = *buffer;
                pendingResult.frame_number = frame_number;
                mPendingReprocessResultList.push_back(pendingResult);
                LOGD("Cache reprocess result %d!", frame_number);
            }
        } else {
            // Live request still awaiting metadata: cache a heap copy of the
            // buffer against the matching stream entry until metadata arrives.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        LOGE("Error: buffer is already set");
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        LOGH("cache buffer %p at result frame_number %u",
                             buffer->buffer, frame_number);
                    }
                }
            }
        }
    }
}
3481 
3482 /*===========================================================================
3483  * FUNCTION   : unblockRequestIfNecessary
3484  *
3485  * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3486  *              that mMutex is held when this function is called.
3487  *
3488  * PARAMETERS :
3489  *
3490  * RETURN     :
3491  *
3492  *==========================================================================*/
unblockRequestIfNecessary()3493 void QCamera3HardwareInterface::unblockRequestIfNecessary()
3494 {
3495    // Unblock process_capture_request
3496    pthread_cond_signal(&mRequestCond);
3497 }
3498 
3499 
3500 /*===========================================================================
3501  * FUNCTION   : processCaptureRequest
3502  *
3503  * DESCRIPTION: process a capture request from camera service
3504  *
3505  * PARAMETERS :
3506  *   @request : request from framework to process
3507  *
3508  * RETURN     :
3509  *
3510  *==========================================================================*/
processCaptureRequest(camera3_capture_request_t * request)3511 int QCamera3HardwareInterface::processCaptureRequest(
3512                     camera3_capture_request_t *request)
3513 {
3514     ATRACE_CALL();
3515     int rc = NO_ERROR;
3516     int32_t request_id;
3517     CameraMetadata meta;
3518     bool isVidBufRequested = false;
3519     camera3_stream_buffer_t *pInputBuffer = NULL;
3520 
3521     pthread_mutex_lock(&mMutex);
3522 
3523     // Validate current state
3524     switch (mState) {
3525         case CONFIGURED:
3526         case STARTED:
3527             /* valid state */
3528             break;
3529 
3530         case ERROR:
3531             pthread_mutex_unlock(&mMutex);
3532             handleCameraDeviceError();
3533             return -ENODEV;
3534 
3535         default:
3536             LOGE("Invalid state %d", mState);
3537             pthread_mutex_unlock(&mMutex);
3538             return -ENODEV;
3539     }
3540 
3541     rc = validateCaptureRequest(request);
3542     if (rc != NO_ERROR) {
3543         LOGE("incoming request is not valid");
3544         pthread_mutex_unlock(&mMutex);
3545         return rc;
3546     }
3547 
3548     meta = request->settings;
3549 
3550     // For first capture request, send capture intent, and
3551     // stream on all streams
3552     if (mState == CONFIGURED) {
3553         // send an unconfigure to the backend so that the isp
3554         // resources are deallocated
3555         if (!mFirstConfiguration) {
3556             cam_stream_size_info_t stream_config_info;
3557             int32_t hal_version = CAM_HAL_V3;
3558             memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3559             stream_config_info.buffer_info.min_buffers =
3560                     MIN_INFLIGHT_REQUESTS;
3561             stream_config_info.buffer_info.max_buffers =
3562                     m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3563             clear_metadata_buffer(mParameters);
3564             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3565                     CAM_INTF_PARM_HAL_VERSION, hal_version);
3566             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3567                     CAM_INTF_META_STREAM_INFO, stream_config_info);
3568             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3569                     mParameters);
3570             if (rc < 0) {
3571                 LOGE("set_parms for unconfigure failed");
3572                 pthread_mutex_unlock(&mMutex);
3573                 return rc;
3574             }
3575         }
3576         m_perfLock.lock_acq();
3577         /* get eis information for stream configuration */
3578         cam_is_type_t is_type;
3579         char is_type_value[PROPERTY_VALUE_MAX];
3580         property_get("persist.camera.is_type", is_type_value, "0");
3581         is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3582 
3583         if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3584             int32_t hal_version = CAM_HAL_V3;
3585             uint8_t captureIntent =
3586                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3587             mCaptureIntent = captureIntent;
3588             clear_metadata_buffer(mParameters);
3589             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3590             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3591         }
3592 
3593         //If EIS is enabled, turn it on for video
3594         bool setEis = m_bEisEnable && m_bEisSupportedSize;
3595         int32_t vsMode;
3596         vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3597         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3598             rc = BAD_VALUE;
3599         }
3600 
3601         //IS type will be 0 unless EIS is supported. If EIS is supported
3602         //it could either be 1 or 4 depending on the stream and video size
3603         if (setEis) {
3604             if (!m_bEisSupportedSize) {
3605                 is_type = IS_TYPE_DIS;
3606             } else {
3607                 is_type = IS_TYPE_EIS_2_0;
3608             }
3609             mStreamConfigInfo.is_type = is_type;
3610         } else {
3611             mStreamConfigInfo.is_type = IS_TYPE_NONE;
3612         }
3613 
3614         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3615                 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3616         int32_t tintless_value = 1;
3617         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3618                 CAM_INTF_PARM_TINTLESS, tintless_value);
3619         //Disable CDS for HFR mode or if DIS/EIS is on.
3620         //CDS is a session parameter in the backend/ISP, so need to be set/reset
3621         //after every configure_stream
3622         if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3623                 (m_bIsVideo)) {
3624             int32_t cds = CAM_CDS_MODE_OFF;
3625             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3626                     CAM_INTF_PARM_CDS_MODE, cds))
3627                 LOGE("Failed to disable CDS for HFR mode");
3628 
3629         }
3630 
3631         if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
3632             uint8_t* use_av_timer = NULL;
3633 
3634             if (m_debug_avtimer){
3635                 use_av_timer = &m_debug_avtimer;
3636             }
3637             else{
3638                 use_av_timer =
3639                     meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
3640             }
3641 
3642             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
3643                 rc = BAD_VALUE;
3644             }
3645         }
3646 
3647         setMobicat();
3648 
3649         /* Set fps and hfr mode while sending meta stream info so that sensor
3650          * can configure appropriate streaming mode */
3651         mHFRVideoFps = DEFAULT_VIDEO_FPS;
3652         mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
3653         mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3654         if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3655             rc = setHalFpsRange(meta, mParameters);
3656             if (rc == NO_ERROR) {
3657                 int32_t max_fps =
3658                     (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3659                 if (mBatchSize) {
3660                     /* For HFR, more buffers are dequeued upfront to improve the performance */
3661                     mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3662                     mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3663                 } else if (max_fps == 60) {
3664                     /* for 60 fps usecas increase inflight requests */
3665                     mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
3666                     mMaxInFlightRequests = MAX_INFLIGHT_60FPS_REQUESTS;
3667                 } else if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
3668                     /* for non 60 fps video use cases, set min = max inflight requests to
3669                     avoid frame drops due to degraded system performance */
3670                     mMinInFlightRequests = MAX_INFLIGHT_REQUESTS;
3671                 }
3672             }
3673             else {
3674                 LOGE("setHalFpsRange failed");
3675             }
3676         }
3677         if (meta.exists(ANDROID_CONTROL_MODE)) {
3678             uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3679             rc = extractSceneMode(meta, metaMode, mParameters);
3680             if (rc != NO_ERROR) {
3681                 LOGE("extractSceneMode failed");
3682             }
3683         }
3684         memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
3685 
3686 
3687         //TODO: validate the arguments, HSV scenemode should have only the
3688         //advertised fps ranges
3689 
3690         /*set the capture intent, hal version, tintless, stream info,
3691          *and disenable parameters to the backend*/
3692         LOGD("set_parms META_STREAM_INFO " );
3693         for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3694             LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
3695                     "Format:%d",
3696                     mStreamConfigInfo.type[i],
3697                     mStreamConfigInfo.stream_sizes[i].width,
3698                     mStreamConfigInfo.stream_sizes[i].height,
3699                     mStreamConfigInfo.postprocess_mask[i],
3700                     mStreamConfigInfo.format[i]);
3701         }
3702 
3703         rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3704                     mParameters);
3705         if (rc < 0) {
3706             LOGE("set_parms failed for hal version, stream info");
3707         }
3708 
3709         cam_dimension_t sensor_dim;
3710         memset(&sensor_dim, 0, sizeof(sensor_dim));
3711         rc = getSensorOutputSize(sensor_dim);
3712         if (rc != NO_ERROR) {
3713             LOGE("Failed to get sensor output size");
3714             pthread_mutex_unlock(&mMutex);
3715             goto error_exit;
3716         }
3717 
3718         mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3719                 gCamCapability[mCameraId]->active_array_size.height,
3720                 sensor_dim.width, sensor_dim.height);
3721 
3722         /* Set batchmode before initializing channel. Since registerBuffer
3723          * internally initializes some of the channels, better set batchmode
3724          * even before first register buffer */
3725         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3726             it != mStreamInfo.end(); it++) {
3727             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3728             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3729                     && mBatchSize) {
3730                 rc = channel->setBatchSize(mBatchSize);
3731                 //Disable per frame map unmap for HFR/batchmode case
3732                 rc |= channel->setPerFrameMapUnmap(false);
3733                 if (NO_ERROR != rc) {
3734                     LOGE("Channel init failed %d", rc);
3735                     pthread_mutex_unlock(&mMutex);
3736                     goto error_exit;
3737                 }
3738             }
3739         }
3740 
3741         //First initialize all streams
3742         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3743             it != mStreamInfo.end(); it++) {
3744             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3745             if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3746                ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3747                setEis)
3748                 rc = channel->initialize(is_type);
3749             else {
3750                 rc = channel->initialize(IS_TYPE_NONE);
3751             }
3752             if (NO_ERROR != rc) {
3753                 LOGE("Channel initialization failed %d", rc);
3754                 pthread_mutex_unlock(&mMutex);
3755                 goto error_exit;
3756             }
3757         }
3758 
3759         if (mRawDumpChannel) {
3760             rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3761             if (rc != NO_ERROR) {
3762                 LOGE("Error: Raw Dump Channel init failed");
3763                 pthread_mutex_unlock(&mMutex);
3764                 goto error_exit;
3765             }
3766         }
3767         if (mSupportChannel) {
3768             rc = mSupportChannel->initialize(IS_TYPE_NONE);
3769             if (rc < 0) {
3770                 LOGE("Support channel initialization failed");
3771                 pthread_mutex_unlock(&mMutex);
3772                 goto error_exit;
3773             }
3774         }
3775         if (mAnalysisChannel) {
3776             rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3777             if (rc < 0) {
3778                 LOGE("Analysis channel initialization failed");
3779                 pthread_mutex_unlock(&mMutex);
3780                 goto error_exit;
3781             }
3782         }
3783         if (mDummyBatchChannel) {
3784             rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3785             if (rc < 0) {
3786                 LOGE("mDummyBatchChannel setBatchSize failed");
3787                 pthread_mutex_unlock(&mMutex);
3788                 goto error_exit;
3789             }
3790             rc = mDummyBatchChannel->initialize(is_type);
3791             if (rc < 0) {
3792                 LOGE("mDummyBatchChannel initialization failed");
3793                 pthread_mutex_unlock(&mMutex);
3794                 goto error_exit;
3795             }
3796         }
3797 
3798         // Set bundle info
3799         rc = setBundleInfo();
3800         if (rc < 0) {
3801             LOGE("setBundleInfo failed %d", rc);
3802             pthread_mutex_unlock(&mMutex);
3803             goto error_exit;
3804         }
3805 
3806         //update settings from app here
3807         if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3808             mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
3809             LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
3810         }
3811         if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
3812             mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
3813             LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
3814         }
3815         if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
3816             mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
3817             LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
3818 
3819             if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
3820                 (mLinkedCameraId != mCameraId) ) {
3821                 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
3822                     mLinkedCameraId, mCameraId);
3823                 goto error_exit;
3824             }
3825         }
3826 
3827         // add bundle related cameras
3828         LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
3829         if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3830             if (mIsDeviceLinked)
3831                 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
3832             else
3833                 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
3834 
3835             pthread_mutex_lock(&gCamLock);
3836 
3837             if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
3838                 LOGE("Dualcam: Invalid Session Id ");
3839                 pthread_mutex_unlock(&gCamLock);
3840                 goto error_exit;
3841             }
3842 
3843             if (mIsMainCamera == 1) {
3844                 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
3845                 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
3846                 // related session id should be session id of linked session
3847                 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3848             } else {
3849                 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
3850                 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
3851                 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3852             }
3853             pthread_mutex_unlock(&gCamLock);
3854 
3855             rc = mCameraHandle->ops->sync_related_sensors(
3856                     mCameraHandle->camera_handle, m_pRelCamSyncBuf);
3857             if (rc < 0) {
3858                 LOGE("Dualcam: link failed");
3859                 goto error_exit;
3860             }
3861         }
3862 
3863         //Then start them.
3864         LOGH("Start META Channel");
3865         rc = mMetadataChannel->start();
3866         if (rc < 0) {
3867             LOGE("META channel start failed");
3868             pthread_mutex_unlock(&mMutex);
3869             goto error_exit;
3870         }
3871 
3872         if (mAnalysisChannel) {
3873             rc = mAnalysisChannel->start();
3874             if (rc < 0) {
3875                 LOGE("Analysis channel start failed");
3876                 mMetadataChannel->stop();
3877                 pthread_mutex_unlock(&mMutex);
3878                 goto error_exit;
3879             }
3880         }
3881 
3882         if (mSupportChannel) {
3883             rc = mSupportChannel->start();
3884             if (rc < 0) {
3885                 LOGE("Support channel start failed");
3886                 mMetadataChannel->stop();
3887                 /* Although support and analysis are mutually exclusive today
3888                    adding it in anycase for future proofing */
3889                 if (mAnalysisChannel) {
3890                     mAnalysisChannel->stop();
3891                 }
3892                 pthread_mutex_unlock(&mMutex);
3893                 goto error_exit;
3894             }
3895         }
3896         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3897             it != mStreamInfo.end(); it++) {
3898             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3899             LOGH("Start Processing Channel mask=%d",
3900                      channel->getStreamTypeMask());
3901             rc = channel->start();
3902             if (rc < 0) {
3903                 LOGE("channel start failed");
3904                 pthread_mutex_unlock(&mMutex);
3905                 goto error_exit;
3906             }
3907         }
3908 
3909         if (mRawDumpChannel) {
3910             LOGD("Starting raw dump stream");
3911             rc = mRawDumpChannel->start();
3912             if (rc != NO_ERROR) {
3913                 LOGE("Error Starting Raw Dump Channel");
3914                 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3915                       it != mStreamInfo.end(); it++) {
3916                     QCamera3Channel *channel =
3917                         (QCamera3Channel *)(*it)->stream->priv;
3918                     LOGH("Stopping Processing Channel mask=%d",
3919                         channel->getStreamTypeMask());
3920                     channel->stop();
3921                 }
3922                 if (mSupportChannel)
3923                     mSupportChannel->stop();
3924                 if (mAnalysisChannel) {
3925                     mAnalysisChannel->stop();
3926                 }
3927                 mMetadataChannel->stop();
3928                 pthread_mutex_unlock(&mMutex);
3929                 goto error_exit;
3930             }
3931         }
3932 
3933         if (mChannelHandle) {
3934 
3935             rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3936                     mChannelHandle);
3937             if (rc != NO_ERROR) {
3938                 LOGE("start_channel failed %d", rc);
3939                 pthread_mutex_unlock(&mMutex);
3940                 goto error_exit;
3941             }
3942         }
3943 
3944         goto no_error;
3945 error_exit:
3946         m_perfLock.lock_rel();
3947         return rc;
3948 no_error:
3949         m_perfLock.lock_rel();
3950 
3951         mWokenUpByDaemon = false;
3952         mPendingLiveRequest = 0;
3953         mFirstConfiguration = false;
3954         enablePowerHint();
3955     }
3956 
3957     uint32_t frameNumber = request->frame_number;
3958     cam_stream_ID_t streamsArray;
3959 
3960     if (mFlushPerf) {
3961         //we cannot accept any requests during flush
3962         LOGE("process_capture_request cannot proceed during flush");
3963         pthread_mutex_unlock(&mMutex);
3964         return NO_ERROR; //should return an error
3965     }
3966 
3967     if (meta.exists(ANDROID_REQUEST_ID)) {
3968         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3969         mCurrentRequestId = request_id;
3970         LOGD("Received request with id: %d", request_id);
3971     } else if (mState == CONFIGURED || mCurrentRequestId == -1){
3972         LOGE("Unable to find request id field, \
3973                 & no previous id available");
3974         pthread_mutex_unlock(&mMutex);
3975         return NAME_NOT_FOUND;
3976     } else {
3977         LOGD("Re-using old request id");
3978         request_id = mCurrentRequestId;
3979     }
3980 
3981     LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
3982                                     request->num_output_buffers,
3983                                     request->input_buffer,
3984                                     frameNumber);
3985     // Acquire all request buffers first
3986     streamsArray.num_streams = 0;
3987     int blob_request = 0;
3988     uint32_t snapshotStreamId = 0;
3989     for (size_t i = 0; i < request->num_output_buffers; i++) {
3990         const camera3_stream_buffer_t& output = request->output_buffers[i];
3991         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3992 
3993         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3994             //Call function to store local copy of jpeg data for encode params.
3995             blob_request = 1;
3996             snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3997         }
3998 
3999         if (output.acquire_fence != -1) {
4000            rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4001            close(output.acquire_fence);
4002            if (rc != OK) {
4003               LOGE("sync wait failed %d", rc);
4004               pthread_mutex_unlock(&mMutex);
4005               return rc;
4006            }
4007         }
4008 
4009         streamsArray.stream_request[streamsArray.num_streams++].streamID =
4010             channel->getStreamID(channel->getStreamTypeMask());
4011 
4012         if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4013             isVidBufRequested = true;
4014         }
4015     }
4016 
4017     if (blob_request) {
4018         KPI_ATRACE_INT("SNAPSHOT", 1);
4019     }
4020     if (blob_request && mRawDumpChannel) {
4021         LOGD("Trigger Raw based on blob request if Raw dump is enabled");
4022         streamsArray.stream_request[streamsArray.num_streams].streamID =
4023             mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
4024         streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
4025     }
4026 
4027     if(request->input_buffer == NULL) {
4028         /* Parse the settings:
4029          * - For every request in NORMAL MODE
4030          * - For every request in HFR mode during preview only case
4031          * - For first request of every batch in HFR mode during video
4032          * recording. In batchmode the same settings except frame number is
4033          * repeated in each request of the batch.
4034          */
4035         if (!mBatchSize ||
4036            (mBatchSize && !isVidBufRequested) ||
4037            (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
4038             rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
4039             if (rc < 0) {
4040                 LOGE("fail to set frame parameters");
4041                 pthread_mutex_unlock(&mMutex);
4042                 return rc;
4043             }
4044         }
4045         /* For batchMode HFR, setFrameParameters is not called for every
4046          * request. But only frame number of the latest request is parsed.
4047          * Keep track of first and last frame numbers in a batch so that
4048          * metadata for the frame numbers of batch can be duplicated in
4049          * handleBatchMetadta */
4050         if (mBatchSize) {
4051             if (!mToBeQueuedVidBufs) {
4052                 //start of the batch
4053                 mFirstFrameNumberInBatch = request->frame_number;
4054             }
4055             if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4056                 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4057                 LOGE("Failed to set the frame number in the parameters");
4058                 return BAD_VALUE;
4059             }
4060         }
4061         if (mNeedSensorRestart) {
4062             /* Unlock the mutex as restartSensor waits on the channels to be
4063              * stopped, which in turn calls stream callback functions -
4064              * handleBufferWithLock and handleMetadataWithLock */
4065             pthread_mutex_unlock(&mMutex);
4066             rc = dynamicUpdateMetaStreamInfo();
4067             if (rc != NO_ERROR) {
4068                 LOGE("Restarting the sensor failed");
4069                 return BAD_VALUE;
4070             }
4071             mNeedSensorRestart = false;
4072             pthread_mutex_lock(&mMutex);
4073         }
4074     } else {
4075 
4076         if (request->input_buffer->acquire_fence != -1) {
4077            rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
4078            close(request->input_buffer->acquire_fence);
4079            if (rc != OK) {
4080               LOGE("input buffer sync wait failed %d", rc);
4081               pthread_mutex_unlock(&mMutex);
4082               return rc;
4083            }
4084         }
4085     }
4086 
4087     if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
4088         mLastCustIntentFrmNum = frameNumber;
4089     }
4090     /* Update pending request list and pending buffers map */
4091     PendingRequestInfo pendingRequest;
4092     pendingRequestIterator latestRequest;
4093     pendingRequest.frame_number = frameNumber;
4094     pendingRequest.num_buffers = request->num_output_buffers;
4095     pendingRequest.request_id = request_id;
4096     pendingRequest.blob_request = blob_request;
4097     pendingRequest.timestamp = 0;
4098     pendingRequest.bUrgentReceived = 0;
4099     if (request->input_buffer) {
4100         pendingRequest.input_buffer =
4101                 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
4102         *(pendingRequest.input_buffer) = *(request->input_buffer);
4103         pInputBuffer = pendingRequest.input_buffer;
4104     } else {
4105        pendingRequest.input_buffer = NULL;
4106        pInputBuffer = NULL;
4107     }
4108 
4109     pendingRequest.pipeline_depth = 0;
4110     pendingRequest.partial_result_cnt = 0;
4111     extractJpegMetadata(mCurJpegMeta, request);
4112     pendingRequest.jpegMetadata = mCurJpegMeta;
4113     pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
4114     pendingRequest.shutter_notified = false;
4115 
4116     //extract capture intent
4117     if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4118         mCaptureIntent =
4119                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4120     }
4121     if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
4122         mHybridAeEnable =
4123                 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
4124     }
4125     pendingRequest.capture_intent = mCaptureIntent;
4126     pendingRequest.hybrid_ae_enable = mHybridAeEnable;
4127     /* DevCamDebug metadata processCaptureRequest */
4128     if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
4129         mDevCamDebugMetaEnable =
4130                 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
4131     }
4132     pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
4133     /* DevCamDebug metadata end */
4134 
4135     //extract CAC info
4136     if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4137         mCacMode =
4138                 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4139     }
4140     pendingRequest.fwkCacMode = mCacMode;
4141 
4142     PendingBuffersInRequest bufsForCurRequest;
4143     bufsForCurRequest.frame_number = frameNumber;
4144     // Mark current timestamp for the new request
4145     bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
4146 
4147     for (size_t i = 0; i < request->num_output_buffers; i++) {
4148         RequestedBufferInfo requestedBuf;
4149         memset(&requestedBuf, 0, sizeof(requestedBuf));
4150         requestedBuf.stream = request->output_buffers[i].stream;
4151         requestedBuf.buffer = NULL;
4152         pendingRequest.buffers.push_back(requestedBuf);
4153 
4154         // Add to buffer handle the pending buffers list
4155         PendingBufferInfo bufferInfo;
4156         bufferInfo.buffer = request->output_buffers[i].buffer;
4157         bufferInfo.stream = request->output_buffers[i].stream;
4158         bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4159         QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4160         LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4161             frameNumber, bufferInfo.buffer,
4162             channel->getStreamTypeMask(), bufferInfo.stream->format);
4163     }
4164     // Add this request packet into mPendingBuffersMap
4165     mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4166     LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4167         mPendingBuffersMap.get_num_overall_buffers());
4168 
4169     latestRequest = mPendingRequestsList.insert(
4170             mPendingRequestsList.end(), pendingRequest);
4171     if(mFlush) {
4172         LOGI("mFlush is true");
4173         pthread_mutex_unlock(&mMutex);
4174         return NO_ERROR;
4175     }
4176 
4177     int indexUsed;
4178     // Notify metadata channel we receive a request
4179     mMetadataChannel->request(NULL, frameNumber, indexUsed);
4180 
4181     if(request->input_buffer != NULL){
4182         LOGD("Input request, frame_number %d", frameNumber);
4183         rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4184         if (NO_ERROR != rc) {
4185             LOGE("fail to set reproc parameters");
4186             pthread_mutex_unlock(&mMutex);
4187             return rc;
4188         }
4189     }
4190 
4191     // Call request on other streams
4192     uint32_t streams_need_metadata = 0;
4193     pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
4194     for (size_t i = 0; i < request->num_output_buffers; i++) {
4195         const camera3_stream_buffer_t& output = request->output_buffers[i];
4196         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4197 
4198         if (channel == NULL) {
4199             LOGW("invalid channel pointer for stream");
4200             continue;
4201         }
4202 
4203         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4204             LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
4205                       output.buffer, request->input_buffer, frameNumber);
4206             if(request->input_buffer != NULL){
4207                 rc = channel->request(output.buffer, frameNumber,
4208                         pInputBuffer, &mReprocMeta, indexUsed);
4209                 if (rc < 0) {
4210                     LOGE("Fail to request on picture channel");
4211                     pthread_mutex_unlock(&mMutex);
4212                     return rc;
4213                 }
4214             } else {
4215                 LOGD("snapshot request with buffer %p, frame_number %d",
4216                          output.buffer, frameNumber);
4217                 if (!request->settings) {
4218                     rc = channel->request(output.buffer, frameNumber,
4219                             NULL, mPrevParameters, indexUsed);
4220                 } else {
4221                     rc = channel->request(output.buffer, frameNumber,
4222                             NULL, mParameters, indexUsed);
4223                 }
4224                 if (rc < 0) {
4225                     LOGE("Fail to request on picture channel");
4226                     pthread_mutex_unlock(&mMutex);
4227                     return rc;
4228                 }
4229 
4230                 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4231                 uint32_t j = 0;
4232                 for (j = 0; j < streamsArray.num_streams; j++) {
4233                     if (streamsArray.stream_request[j].streamID == streamId) {
4234                       if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4235                           streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4236                       else
4237                           streamsArray.stream_request[j].buf_index = indexUsed;
4238                         break;
4239                     }
4240                 }
4241                 if (j == streamsArray.num_streams) {
4242                     LOGE("Did not find matching stream to update index");
4243                     assert(0);
4244                 }
4245 
4246                 pendingBufferIter->need_metadata = true;
4247                 streams_need_metadata++;
4248             }
4249         } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
4250             bool needMetadata = false;
4251             QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
4252             rc = yuvChannel->request(output.buffer, frameNumber,
4253                     pInputBuffer,
4254                     (pInputBuffer ? &mReprocMeta : mParameters), needMetadata, indexUsed);
4255             if (rc < 0) {
4256                 LOGE("Fail to request on YUV channel");
4257                 pthread_mutex_unlock(&mMutex);
4258                 return rc;
4259             }
4260 
4261             uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4262             uint32_t j = 0;
4263             for (j = 0; j < streamsArray.num_streams; j++) {
4264                 if (streamsArray.stream_request[j].streamID == streamId) {
4265                     if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4266                         streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4267                     else
4268                         streamsArray.stream_request[j].buf_index = indexUsed;
4269                     break;
4270                 }
4271             }
4272             if (j == streamsArray.num_streams) {
4273                 LOGE("Did not find matching stream to update index");
4274                 assert(0);
4275             }
4276 
4277             pendingBufferIter->need_metadata = needMetadata;
4278             if (needMetadata)
4279                 streams_need_metadata += 1;
4280             LOGD("calling YUV channel request, need_metadata is %d",
4281                      needMetadata);
4282         } else {
4283             LOGD("request with buffer %p, frame_number %d",
4284                   output.buffer, frameNumber);
4285 
4286             rc = channel->request(output.buffer, frameNumber, indexUsed);
4287 
4288             uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4289             uint32_t j = 0;
4290             for (j = 0; j < streamsArray.num_streams; j++) {
4291                 if (streamsArray.stream_request[j].streamID == streamId) {
4292                     if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4293                         streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4294                     else
4295                         streamsArray.stream_request[j].buf_index = indexUsed;
4296                     break;
4297                 }
4298             }
4299             if (j == streamsArray.num_streams) {
4300                 LOGE("Did not find matching stream to update index");
4301                 assert(0);
4302             }
4303 
4304             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4305                     && mBatchSize) {
4306                 mToBeQueuedVidBufs++;
4307                 if (mToBeQueuedVidBufs == mBatchSize) {
4308                     channel->queueBatchBuf();
4309                 }
4310             }
4311             if (rc < 0) {
4312                 LOGE("request failed");
4313                 pthread_mutex_unlock(&mMutex);
4314                 return rc;
4315             }
4316         }
4317         pendingBufferIter++;
4318     }
4319 
4320     //If 2 streams have need_metadata set to true, fail the request, unless
4321     //we copy/reference count the metadata buffer
4322     if (streams_need_metadata > 1) {
4323         LOGE("not supporting request in which two streams requires"
4324                 " 2 HAL metadata for reprocessing");
4325         pthread_mutex_unlock(&mMutex);
4326         return -EINVAL;
4327     }
4328 
4329     if (request->input_buffer == NULL) {
4330         /* Set the parameters to backend:
4331          * - For every request in NORMAL MODE
4332          * - For every request in HFR mode during preview only case
4333          * - Once every batch in HFR mode during video recording
4334          */
4335         if (!mBatchSize ||
4336            (mBatchSize && !isVidBufRequested) ||
4337            (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
4338             LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
4339                      mBatchSize, isVidBufRequested,
4340                     mToBeQueuedVidBufs);
4341 
4342             if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
4343                 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4344                     uint32_t m = 0;
4345                     for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4346                         if (streamsArray.stream_request[k].streamID ==
4347                                 mBatchedStreamsArray.stream_request[m].streamID)
4348                             break;
4349                         }
4350                         if (m == mBatchedStreamsArray.num_streams) {
4351                             mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
4352                                 streamsArray.stream_request[k].streamID;
4353                             mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
4354                                 streamsArray.stream_request[k].buf_index;
4355                             mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4356                         }
4357                 }
4358                 streamsArray = mBatchedStreamsArray;
4359             }
4360             /* Update stream id of all the requested buffers */
4361             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
4362                 LOGE("Failed to set stream type mask in the parameters");
4363                 return BAD_VALUE;
4364             }
4365 
4366             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4367                     mParameters);
4368             if (rc < 0) {
4369                 LOGE("set_parms failed");
4370             }
4371             /* reset to zero coz, the batch is queued */
4372             mToBeQueuedVidBufs = 0;
4373             mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
4374             memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
4375         } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
4376             for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4377                 uint32_t m = 0;
4378                 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4379                     if (streamsArray.stream_request[k].streamID ==
4380                             mBatchedStreamsArray.stream_request[m].streamID)
4381                         break;
4382                 }
4383                 if (m == mBatchedStreamsArray.num_streams) {
4384                     mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
4385                         streamsArray.stream_request[k].streamID;
4386                     mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
4387                         streamsArray.stream_request[k].buf_index;
4388                     mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4389                 }
4390             }
4391         }
4392         mPendingLiveRequest++;
4393     }
4394 
4395     LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
4396 
4397     mState = STARTED;
4398     // Added a timed condition wait
4399     struct timespec ts;
4400     uint8_t isValidTimeout = 1;
4401     rc = clock_gettime(CLOCK_REALTIME, &ts);
4402     if (rc < 0) {
4403       isValidTimeout = 0;
4404       LOGE("Error reading the real time clock!!");
4405     }
4406     else {
4407       // Make timeout as 5 sec for request to be honored
4408       ts.tv_sec += 5;
4409     }
4410     //Block on conditional variable
4411     while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
4412             (mState != ERROR) && (mState != DEINIT)) {
4413         if (!isValidTimeout) {
4414             LOGD("Blocking on conditional wait");
4415             pthread_cond_wait(&mRequestCond, &mMutex);
4416         }
4417         else {
4418             LOGD("Blocking on timed conditional wait");
4419             rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
4420             if (rc == ETIMEDOUT) {
4421                 rc = -ENODEV;
4422                 LOGE("Unblocked on timeout!!!!");
4423                 break;
4424             }
4425         }
4426         LOGD("Unblocked");
4427         if (mWokenUpByDaemon) {
4428             mWokenUpByDaemon = false;
4429             if (mPendingLiveRequest < mMaxInFlightRequests)
4430                 break;
4431         }
4432     }
4433     pthread_mutex_unlock(&mMutex);
4434 
4435     return rc;
4436 }
4437 
4438 /*===========================================================================
4439  * FUNCTION   : dump
4440  *
4441  * DESCRIPTION:
4442  *
4443  * PARAMETERS :
4444  *
4445  *
4446  * RETURN     :
4447  *==========================================================================*/
dump(int fd)4448 void QCamera3HardwareInterface::dump(int fd)
4449 {
4450     pthread_mutex_lock(&mMutex);
4451     dprintf(fd, "\n Camera HAL3 information Begin \n");
4452 
4453     dprintf(fd, "\nNumber of pending requests: %zu \n",
4454         mPendingRequestsList.size());
4455     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4456     dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
4457     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4458     for(pendingRequestIterator i = mPendingRequestsList.begin();
4459             i != mPendingRequestsList.end(); i++) {
4460         dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
4461         i->frame_number, i->num_buffers, i->request_id, i->blob_request,
4462         i->input_buffer);
4463     }
4464     dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
4465                 mPendingBuffersMap.get_num_overall_buffers());
4466     dprintf(fd, "-------+------------------\n");
4467     dprintf(fd, " Frame | Stream type mask \n");
4468     dprintf(fd, "-------+------------------\n");
4469     for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
4470         for(auto &j : req.mPendingBufferList) {
4471             QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
4472             dprintf(fd, " %5d | %11d \n",
4473                     req.frame_number, channel->getStreamTypeMask());
4474         }
4475     }
4476     dprintf(fd, "-------+------------------\n");
4477 
4478     dprintf(fd, "\nPending frame drop list: %zu\n",
4479         mPendingFrameDropList.size());
4480     dprintf(fd, "-------+-----------\n");
4481     dprintf(fd, " Frame | Stream ID \n");
4482     dprintf(fd, "-------+-----------\n");
4483     for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
4484         i != mPendingFrameDropList.end(); i++) {
4485         dprintf(fd, " %5d | %9d \n",
4486             i->frame_number, i->stream_ID);
4487     }
4488     dprintf(fd, "-------+-----------\n");
4489 
4490     dprintf(fd, "\n Camera HAL3 information End \n");
4491 
4492     /* use dumpsys media.camera as trigger to send update debug level event */
4493     mUpdateDebugLevel = true;
4494     pthread_mutex_unlock(&mMutex);
4495     return;
4496 }
4497 
4498 /*===========================================================================
4499  * FUNCTION   : flush
4500  *
4501  * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
4502  *              conditionally restarts channels
4503  *
4504  * PARAMETERS :
4505  *  @ restartChannels: re-start all channels
4506  *
4507  *
4508  * RETURN     :
4509  *          0 on success
4510  *          Error code on failure
4511  *==========================================================================*/
flush(bool restartChannels)4512 int QCamera3HardwareInterface::flush(bool restartChannels)
4513 {
4514     KPI_ATRACE_CALL();
4515     int32_t rc = NO_ERROR;
4516 
4517     LOGD("Unblocking Process Capture Request");
4518     pthread_mutex_lock(&mMutex);
4519     mFlush = true;
4520     pthread_mutex_unlock(&mMutex);
4521 
4522     rc = stopAllChannels();
4523     // unlink of dualcam
4524     if (mIsDeviceLinked) {
4525         m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4526         pthread_mutex_lock(&gCamLock);
4527 
4528         if (mIsMainCamera == 1) {
4529             m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4530             m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
4531             // related session id should be session id of linked session
4532             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4533         } else {
4534             m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4535             m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
4536             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4537         }
4538         pthread_mutex_unlock(&gCamLock);
4539 
4540         rc = mCameraHandle->ops->sync_related_sensors(
4541                 mCameraHandle->camera_handle, m_pRelCamSyncBuf);
4542         if (rc < 0) {
4543             LOGE("Dualcam: Unlink failed, but still proceed to close");
4544         }
4545     }
4546 
4547     if (rc < 0) {
4548         LOGE("stopAllChannels failed");
4549         return rc;
4550     }
4551     if (mChannelHandle) {
4552         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
4553                 mChannelHandle);
4554     }
4555 
4556     // Reset bundle info
4557     rc = setBundleInfo();
4558     if (rc < 0) {
4559         LOGE("setBundleInfo failed %d", rc);
4560         return rc;
4561     }
4562 
4563     // Mutex Lock
4564     pthread_mutex_lock(&mMutex);
4565 
4566     // Unblock process_capture_request
4567     mPendingLiveRequest = 0;
4568     pthread_cond_signal(&mRequestCond);
4569 
4570     rc = notifyErrorForPendingRequests();
4571     if (rc < 0) {
4572         LOGE("notifyErrorForPendingRequests failed");
4573         pthread_mutex_unlock(&mMutex);
4574         return rc;
4575     }
4576 
4577     mFlush = false;
4578 
4579     // Start the Streams/Channels
4580     if (restartChannels) {
4581         rc = startAllChannels();
4582         if (rc < 0) {
4583             LOGE("startAllChannels failed");
4584             pthread_mutex_unlock(&mMutex);
4585             return rc;
4586         }
4587     }
4588 
4589     if (mChannelHandle) {
4590         mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4591                     mChannelHandle);
4592         if (rc < 0) {
4593             LOGE("start_channel failed");
4594             pthread_mutex_unlock(&mMutex);
4595             return rc;
4596         }
4597     }
4598 
4599     pthread_mutex_unlock(&mMutex);
4600 
4601     return 0;
4602 }
4603 
4604 /*===========================================================================
4605  * FUNCTION   : flushPerf
4606  *
4607  * DESCRIPTION: This is the performance optimization version of flush that does
4608  *              not use stream off, rather flushes the system
4609  *
4610  * PARAMETERS :
4611  *
4612  *
4613  * RETURN     : 0 : success
4614  *              -EINVAL: input is malformed (device is not valid)
4615  *              -ENODEV: if the device has encountered a serious error
4616  *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    ATRACE_CALL();
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    // Snapshot the number of buffers the HAL currently owns; the flush is
    // complete once all of them have been returned (tracked elsewhere by the
    // buffer-return path decrementing numPendingBufsAtFlush and signalling
    // mBuffersCond).
    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    mPendingBuffersMap.numPendingBufsAtFlush =
        mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
        mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        // Backend flush failed: clear the in-flush flag and report a fatal
        // device error to the caller.
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        // Nothing outstanding — no need to wait for buffer returns.
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // Prefer a timed wait (FLUSH_TIMEOUT seconds) so a stuck backend cannot
    // block this thread forever; fall back to an untimed wait only if the
    // clock cannot be read.
    rc = clock_gettime(CLOCK_REALTIME, &timeout);
    if (rc < 0) {
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    // mMutex is atomically released while waiting and re-acquired on wakeup,
    // so checking numPendingBufsAtFlush each iteration is race-free.
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                 LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                 break;
            }
        } else {
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                // ETIMEDOUT (or any other failure) falls through to the
                // error return below.
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                            strerror(rc));
                break;
            }
        }
    }
    if (rc != 0) {
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
               LOGE("Flushing the channels failed with error %d", rc);
               // even though the channel flush failed we need to continue and
               // return the buffers we have to the framework, however the return
               // value will be an error
               rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
4717 
4718 /*===========================================================================
4719  * FUNCTION   : handleCameraDeviceError
4720  *
4721  * DESCRIPTION: This function calls internal flush and notifies the error to
4722  *              framework and updates the state variable.
4723  *
4724  * PARAMETERS : None
4725  *
4726  * RETURN     : NO_ERROR on Success
4727  *              Error code on failure
4728  *==========================================================================*/
handleCameraDeviceError()4729 int32_t QCamera3HardwareInterface::handleCameraDeviceError()
4730 {
4731     int32_t rc = NO_ERROR;
4732 
4733     pthread_mutex_lock(&mMutex);
4734     if (mState != ERROR) {
4735         //if mState != ERROR, nothing to be done
4736         pthread_mutex_unlock(&mMutex);
4737         return NO_ERROR;
4738     }
4739     pthread_mutex_unlock(&mMutex);
4740 
4741     rc = flush(false /* restart channels */);
4742     if (NO_ERROR != rc) {
4743         LOGE("internal flush to handle mState = ERROR failed");
4744     }
4745 
4746     pthread_mutex_lock(&mMutex);
4747     mState = DEINIT;
4748     pthread_mutex_unlock(&mMutex);
4749 
4750     camera3_notify_msg_t notify_msg;
4751     memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
4752     notify_msg.type = CAMERA3_MSG_ERROR;
4753     notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
4754     notify_msg.message.error.error_stream = NULL;
4755     notify_msg.message.error.frame_number = 0;
4756     mCallbackOps->notify(mCallbackOps, &notify_msg);
4757 
4758     return rc;
4759 }
4760 
4761 /*===========================================================================
4762  * FUNCTION   : captureResultCb
4763  *
4764  * DESCRIPTION: Callback handler for all capture result
4765  *              (streams, as well as metadata)
4766  *
4767  * PARAMETERS :
4768  *   @metadata : metadata information
4769  *   @buffer   : actual gralloc buffer to be returned to frameworks.
4770  *               NULL if metadata.
4771  *
4772  * RETURN     : NONE
4773  *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata_buf,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer)4774 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4775                 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4776 {
4777     if (metadata_buf) {
4778         if (mBatchSize) {
4779             handleBatchMetadata(metadata_buf,
4780                     true /* free_and_bufdone_meta_buf */);
4781         } else { /* mBatchSize = 0 */
4782             hdrPlusPerfLock(metadata_buf);
4783             pthread_mutex_lock(&mMutex);
4784             handleMetadataWithLock(metadata_buf,
4785                     true /* free_and_bufdone_meta_buf */,
4786                     false /* first frame of batch metadata */ );
4787             pthread_mutex_unlock(&mMutex);
4788         }
4789     } else if (isInputBuffer) {
4790         pthread_mutex_lock(&mMutex);
4791         handleInputBufferWithLock(frame_number);
4792         pthread_mutex_unlock(&mMutex);
4793     } else {
4794         pthread_mutex_lock(&mMutex);
4795         handleBufferWithLock(buffer, frame_number);
4796         pthread_mutex_unlock(&mMutex);
4797     }
4798     return;
4799 }
4800 
4801 /*===========================================================================
4802  * FUNCTION   : getReprocessibleOutputStreamId
4803  *
4804  * DESCRIPTION: Get source output stream id for the input reprocess stream
4805  *              based on size and format, which would be the largest
4806  *              output stream if an input stream exists.
4807  *
4808  * PARAMETERS :
4809  *   @id      : return the stream id if found
4810  *
4811  * RETURN     : int32_t type of status
4812  *              NO_ERROR  -- success
4813  *              none-zero failure code
4814  *==========================================================================*/
getReprocessibleOutputStreamId(uint32_t & id)4815 int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4816 {
4817     /* check if any output or bidirectional stream with the same size and format
4818        and return that stream */
4819     if ((mInputStreamInfo.dim.width > 0) &&
4820             (mInputStreamInfo.dim.height > 0)) {
4821         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4822                 it != mStreamInfo.end(); it++) {
4823 
4824             camera3_stream_t *stream = (*it)->stream;
4825             if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4826                     (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4827                     (stream->format == mInputStreamInfo.format)) {
4828                 // Usage flag for an input stream and the source output stream
4829                 // may be different.
4830                 LOGD("Found reprocessible output stream! %p", *it);
4831                 LOGD("input stream usage 0x%x, current stream usage 0x%x",
4832                          stream->usage, mInputStreamInfo.usage);
4833 
4834                 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4835                 if (channel != NULL && channel->mStreams[0]) {
4836                     id = channel->mStreams[0]->getMyServerID();
4837                     return NO_ERROR;
4838                 }
4839             }
4840         }
4841     } else {
4842         LOGD("No input stream, so no reprocessible output stream");
4843     }
4844     return NAME_NOT_FOUND;
4845 }
4846 
4847 /*===========================================================================
4848  * FUNCTION   : lookupFwkName
4849  *
4850  * DESCRIPTION: In case the enum is not same in fwk and backend
4851  *              make sure the parameter is correctly propogated
4852  *
4853  * PARAMETERS  :
4854  *   @arr      : map between the two enums
4855  *   @len      : len of the map
4856  *   @hal_name : name of the hal_parm to map
4857  *
4858  * RETURN     : int type of status
4859  *              fwk_name  -- success
4860  *              none-zero failure code
4861  *==========================================================================*/
lookupFwkName(const mapType * arr,size_t len,halType hal_name)4862 template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
4863         size_t len, halType hal_name)
4864 {
4865 
4866     for (size_t i = 0; i < len; i++) {
4867         if (arr[i].hal_name == hal_name) {
4868             return arr[i].fwk_name;
4869         }
4870     }
4871 
4872     /* Not able to find matching framework type is not necessarily
4873      * an error case. This happens when mm-camera supports more attributes
4874      * than the frameworks do */
4875     LOGH("Cannot find matching framework type");
4876     return NAME_NOT_FOUND;
4877 }
4878 
4879 /*===========================================================================
4880  * FUNCTION   : lookupHalName
4881  *
4882  * DESCRIPTION: In case the enum is not same in fwk and backend
4883  *              make sure the parameter is correctly propogated
4884  *
4885  * PARAMETERS  :
4886  *   @arr      : map between the two enums
4887  *   @len      : len of the map
4888  *   @fwk_name : name of the hal_parm to map
4889  *
4890  * RETURN     : int32_t type of status
4891  *              hal_name  -- success
4892  *              none-zero failure code
4893  *==========================================================================*/
lookupHalName(const mapType * arr,size_t len,fwkType fwk_name)4894 template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
4895         size_t len, fwkType fwk_name)
4896 {
4897     for (size_t i = 0; i < len; i++) {
4898         if (arr[i].fwk_name == fwk_name) {
4899             return arr[i].hal_name;
4900         }
4901     }
4902 
4903     LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
4904     return NAME_NOT_FOUND;
4905 }
4906 
4907 /*===========================================================================
4908  * FUNCTION   : lookupProp
4909  *
4910  * DESCRIPTION: lookup a value by its name
4911  *
4912  * PARAMETERS :
4913  *   @arr     : map between the two enums
4914  *   @len     : size of the map
4915  *   @name    : name to be looked up
4916  *
4917  * RETURN     : Value if found
4918  *              CAM_CDS_MODE_MAX if not found
4919  *==========================================================================*/
lookupProp(const mapType * arr,size_t len,const char * name)4920 template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
4921         size_t len, const char *name)
4922 {
4923     if (name) {
4924         for (size_t i = 0; i < len; i++) {
4925             if (!strcmp(arr[i].desc, name)) {
4926                 return arr[i].val;
4927             }
4928         }
4929     }
4930     return CAM_CDS_MODE_MAX;
4931 }
4932 
/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translates the metadata buffer received from the HAL/backend
 *              into the framework CameraMetadata representation.
 *
4937  * PARAMETERS :
4938  *   @metadata : metadata information from callback
4939  *   @timestamp: metadata buffer timestamp
4940  *   @request_id: request id
4941  *   @jpegMetadata: additional jpeg metadata
4942  *   @hybrid_ae_enable: whether hybrid ae is enabled
4943  *   // DevCamDebug metadata
4944  *   @DevCamDebug_meta_enable: enable DevCamDebug meta
4945  *   // DevCamDebug metadata end
4946  *   @pprocDone: whether internal offline postprocsesing is done
4947  *
4948  * RETURN     : camera_metadata_t*
4949  *              metadata in a format specified by fwk
4950  *==========================================================================*/
4951 camera_metadata_t*
translateFromHalMetadata(metadata_buffer_t * metadata,nsecs_t timestamp,int32_t request_id,const CameraMetadata & jpegMetadata,uint8_t pipeline_depth,uint8_t capture_intent,uint8_t hybrid_ae_enable,uint8_t DevCamDebug_meta_enable,bool pprocDone,uint8_t fwk_cacMode,bool firstMetadataInBatch)4952 QCamera3HardwareInterface::translateFromHalMetadata(
4953                                  metadata_buffer_t *metadata,
4954                                  nsecs_t timestamp,
4955                                  int32_t request_id,
4956                                  const CameraMetadata& jpegMetadata,
4957                                  uint8_t pipeline_depth,
4958                                  uint8_t capture_intent,
4959                                  uint8_t hybrid_ae_enable,
4960                                  /* DevCamDebug metadata translateFromHalMetadata argument */
4961                                  uint8_t DevCamDebug_meta_enable,
4962                                  /* DevCamDebug metadata end */
4963                                  bool pprocDone,
4964                                  uint8_t fwk_cacMode,
4965                                  bool firstMetadataInBatch)
4966 {
4967     CameraMetadata camMetadata;
4968     camera_metadata_t *resultMetadata;
4969 
4970     if (mBatchSize && !firstMetadataInBatch) {
4971         /* In batch mode, use cached metadata from the first metadata
4972             in the batch */
4973         camMetadata.clear();
4974         camMetadata = mCachedMetadata;
4975     }
4976 
4977     if (jpegMetadata.entryCount())
4978         camMetadata.append(jpegMetadata);
4979 
4980     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
4981     camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4982     camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4983     camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4984     camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
4985     if (mBatchSize == 0) {
4986         // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
4987         camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
4988     }
4989 
4990     if (mBatchSize && !firstMetadataInBatch) {
4991         /* In batch mode, use cached metadata instead of parsing metadata buffer again */
4992         resultMetadata = camMetadata.release();
4993         return resultMetadata;
4994     }
4995 
4996     // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
4997     // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
4998     if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
4999         // DevCamDebug metadata translateFromHalMetadata AF
5000         IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
5001                 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
5002             int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
5003             camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
5004         }
5005         IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
5006                 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
5007             int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
5008             camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
5009         }
5010         IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
5011                 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
5012             int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
5013             camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
5014         }
5015         IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
5016                 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
5017             int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
5018             camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
5019         }
5020         IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
5021                 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
5022             int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
5023             camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
5024         }
5025         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
5026                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
5027             int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
5028                 *DevCamDebug_af_monitor_pdaf_target_pos;
5029             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
5030                 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
5031         }
5032         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
5033                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
5034             int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
5035                 *DevCamDebug_af_monitor_pdaf_confidence;
5036             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
5037                 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
5038         }
5039         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
5040                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
5041             int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
5042             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
5043                 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
5044         }
5045         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
5046                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
5047             int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
5048                 *DevCamDebug_af_monitor_tof_target_pos;
5049             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
5050                 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
5051         }
5052         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
5053                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
5054             int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
5055                 *DevCamDebug_af_monitor_tof_confidence;
5056             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
5057                 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
5058         }
5059         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
5060                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
5061             int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
5062             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
5063                 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
5064         }
5065         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
5066                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
5067             int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
5068             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
5069                 &fwk_DevCamDebug_af_monitor_type_select, 1);
5070         }
5071         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
5072                 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
5073             int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
5074             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
5075                 &fwk_DevCamDebug_af_monitor_refocus, 1);
5076         }
5077         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
5078                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
5079             int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
5080             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
5081                 &fwk_DevCamDebug_af_monitor_target_pos, 1);
5082         }
5083         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
5084                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
5085             int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
5086                 *DevCamDebug_af_search_pdaf_target_pos;
5087             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
5088                 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
5089         }
5090         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
5091                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
5092             int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
5093             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
5094                 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
5095         }
5096         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
5097                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
5098             int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
5099             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
5100                 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
5101         }
5102         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
5103                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
5104             int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
5105             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
5106                 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
5107         }
5108         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
5109                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
5110             int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
5111             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
5112                 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
5113         }
5114         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
5115                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
5116             int32_t fwk_DevCamDebug_af_search_tof_target_pos =
5117                 *DevCamDebug_af_search_tof_target_pos;
5118             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
5119                 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
5120         }
5121         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
5122                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
5123             int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
5124             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
5125                 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
5126         }
5127         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
5128                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
5129             int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
5130             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
5131                 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
5132         }
5133         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
5134                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
5135             int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
5136             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
5137                 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
5138         }
5139         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
5140                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
5141             int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
5142             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
5143                 &fwk_DevCamDebug_af_search_tof_confidence, 1);
5144         }
5145         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
5146                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
5147             int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
5148             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
5149                 &fwk_DevCamDebug_af_search_type_select, 1);
5150         }
5151         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
5152                 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
5153             int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
5154             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
5155                 &fwk_DevCamDebug_af_search_next_pos, 1);
5156         }
5157         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
5158                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
5159             int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
5160             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
5161                 &fwk_DevCamDebug_af_search_target_pos, 1);
5162         }
5163         // DevCamDebug metadata translateFromHalMetadata AEC
5164         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
5165                 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
5166             int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
5167             camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
5168     }
5169         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
5170                 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
5171             int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
5172             camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
5173         }
5174         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
5175                 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
5176             int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
5177             camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
5178         }
5179         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
5180                 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
5181             int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
5182             camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
5183         }
5184         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
5185                 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
5186             int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
5187             camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
5188         }
5189         IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
5190                 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
5191             float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
5192             camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
5193         }
5194         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
5195                 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
5196             int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
5197             camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
5198         }
5199         IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
5200                 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
5201             float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
5202             camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
5203         }
5204         // DevCamDebug metadata translateFromHalMetadata AWB
5205         IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
5206                 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
5207             float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
5208             camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
5209         }
5210         IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
5211                 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
5212             float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
5213             camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
5214         }
5215         IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
5216                 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
5217             float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
5218             camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
5219         }
5220         IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
5221                 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
5222             int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
5223             camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
5224         }
5225         IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
5226                 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
5227             int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
5228             camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
5229         }
5230     }
5231     // atrace_end(ATRACE_TAG_ALWAYS);
5232 
5233     IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
5234         int64_t fwk_frame_number = *frame_number;
5235         camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
5236     }
5237 
5238     IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
5239         int32_t fps_range[2];
5240         fps_range[0] = (int32_t)float_range->min_fps;
5241         fps_range[1] = (int32_t)float_range->max_fps;
5242         camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
5243                                       fps_range, 2);
5244         LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
5245              fps_range[0], fps_range[1]);
5246     }
5247 
5248     IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
5249         camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
5250     }
5251 
5252     IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5253         int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
5254                 METADATA_MAP_SIZE(SCENE_MODES_MAP),
5255                 *sceneMode);
5256         if (NAME_NOT_FOUND != val) {
5257             uint8_t fwkSceneMode = (uint8_t)val;
5258             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
5259             LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
5260                      fwkSceneMode);
5261         }
5262     }
5263 
5264     IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
5265         uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
5266         camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
5267     }
5268 
5269     IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
5270         uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
5271         camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
5272     }
5273 
5274     IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
5275         uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
5276         camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
5277     }
5278 
5279     IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
5280             CAM_INTF_META_EDGE_MODE, metadata) {
5281         camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
5282     }
5283 
5284     IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
5285         uint8_t fwk_flashPower = (uint8_t) *flashPower;
5286         camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
5287     }
5288 
5289     IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
5290         camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
5291     }
5292 
5293     IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
5294         if (0 <= *flashState) {
5295             uint8_t fwk_flashState = (uint8_t) *flashState;
5296             if (!gCamCapability[mCameraId]->flash_available) {
5297                 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
5298             }
5299             camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
5300         }
5301     }
5302 
5303     IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
5304         int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
5305         if (NAME_NOT_FOUND != val) {
5306             uint8_t fwk_flashMode = (uint8_t)val;
5307             camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
5308         }
5309     }
5310 
5311     IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
5312         uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
5313         camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
5314     }
5315 
5316     IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
5317         camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
5318     }
5319 
5320     IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
5321         camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
5322     }
5323 
5324     IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
5325         camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
5326     }
5327 
5328     IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
5329         uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
5330         camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
5331     }
5332 
5333     IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
5334         uint8_t fwk_videoStab = (uint8_t) *videoStab;
5335         LOGD("fwk_videoStab = %d", fwk_videoStab);
5336         camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
5337     } else {
5338         // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
5339         // and so hardcoding the Video Stab result to OFF mode.
5340         uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
5341         camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
5342         LOGD("%s: EIS result default to OFF mode", __func__);
5343     }
5344 
5345     IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
5346         uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
5347         camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
5348     }
5349 
5350     IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
5351         camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
5352     }
5353 
5354     IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
5355         CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
5356         float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
5357 
5358         adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
5359               gCamCapability[mCameraId]->color_arrangement);
5360 
5361         LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
5362           blackLevelAppliedPattern->cam_black_level[0],
5363           blackLevelAppliedPattern->cam_black_level[1],
5364           blackLevelAppliedPattern->cam_black_level[2],
5365           blackLevelAppliedPattern->cam_black_level[3]);
5366         camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
5367                 BLACK_LEVEL_PATTERN_CNT);
5368 
5369         // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
5370         // Need convert the internal 12 bit depth to sensor 10 bit sensor raw
5371         // depth space.
5372         fwk_blackLevelInd[0] /= 4.0;
5373         fwk_blackLevelInd[1] /= 4.0;
5374         fwk_blackLevelInd[2] /= 4.0;
5375         fwk_blackLevelInd[3] /= 4.0;
5376         camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
5377                 BLACK_LEVEL_PATTERN_CNT);
5378     }
5379 
5380     // Fixed whitelevel is used by ISP/Sensor
5381     camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
5382             &gCamCapability[mCameraId]->white_level, 1);
5383 
5384     IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
5385             CAM_INTF_META_SCALER_CROP_REGION, metadata) {
5386         int32_t scalerCropRegion[4];
5387         scalerCropRegion[0] = hScalerCropRegion->left;
5388         scalerCropRegion[1] = hScalerCropRegion->top;
5389         scalerCropRegion[2] = hScalerCropRegion->width;
5390         scalerCropRegion[3] = hScalerCropRegion->height;
5391 
5392         // Adjust crop region from sensor output coordinate system to active
5393         // array coordinate system.
5394         mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
5395                 scalerCropRegion[2], scalerCropRegion[3]);
5396 
5397         camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
5398     }
5399 
5400     IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
5401         LOGD("sensorExpTime = %lld", *sensorExpTime);
5402         camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
5403     }
5404 
5405     IF_META_AVAILABLE(int64_t, sensorFameDuration,
5406             CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
5407         LOGD("sensorFameDuration = %lld", *sensorFameDuration);
5408         camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
5409     }
5410 
5411     IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
5412             CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
5413         LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
5414         camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
5415                 sensorRollingShutterSkew, 1);
5416     }
5417 
5418     IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
5419         LOGD("sensorSensitivity = %d", *sensorSensitivity);
5420         camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
5421 
5422         //calculate the noise profile based on sensitivity
5423         double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
5424         double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
5425         double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
5426         for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
5427             noise_profile[i]   = noise_profile_S;
5428             noise_profile[i+1] = noise_profile_O;
5429         }
5430         LOGD("noise model entry (S, O) is (%f, %f)",
5431                 noise_profile_S, noise_profile_O);
5432         camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
5433                 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
5434     }
5435 
5436     int32_t fwk_ispSensitivity = 100;
5437     IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
5438         fwk_ispSensitivity = (int32_t) *ispSensitivity;
5439     }
5440     IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
5441         fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
5442     }
5443     camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
5444 
5445     IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
5446         uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
5447         camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
5448     }
5449 
5450     IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
5451         int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
5452                 *faceDetectMode);
5453         if (NAME_NOT_FOUND != val) {
5454             uint8_t fwk_faceDetectMode = (uint8_t)val;
5455             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
5456 
5457             if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
5458                 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
5459                         CAM_INTF_META_FACE_DETECTION, metadata) {
5460                     uint8_t numFaces = MIN(
5461                             faceDetectionInfo->num_faces_detected, MAX_ROI);
5462                     int32_t faceIds[MAX_ROI];
5463                     uint8_t faceScores[MAX_ROI];
5464                     int32_t faceRectangles[MAX_ROI * 4];
5465                     int32_t faceLandmarks[MAX_ROI * 6];
5466                     size_t j = 0, k = 0;
5467 
5468                     for (size_t i = 0; i < numFaces; i++) {
5469                         faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
5470                         // Adjust crop region from sensor output coordinate system to active
5471                         // array coordinate system.
5472                         cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
5473                         mCropRegionMapper.toActiveArray(rect.left, rect.top,
5474                                 rect.width, rect.height);
5475 
5476                         convertToRegions(faceDetectionInfo->faces[i].face_boundary,
5477                                 faceRectangles+j, -1);
5478 
5479                         j+= 4;
5480                     }
5481                     if (numFaces <= 0) {
5482                         memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
5483                         memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
5484                         memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
5485                         memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
5486                     }
5487 
5488                     camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
5489                             numFaces);
5490                     camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
5491                             faceRectangles, numFaces * 4U);
5492                     if (fwk_faceDetectMode ==
5493                             ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
5494                         IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
5495                                 CAM_INTF_META_FACE_LANDMARK, metadata) {
5496 
5497                             for (size_t i = 0; i < numFaces; i++) {
5498                                 // Map the co-ordinate sensor output coordinate system to active
5499                                 // array coordinate system.
5500                                 mCropRegionMapper.toActiveArray(
5501                                         landmarks->face_landmarks[i].left_eye_center.x,
5502                                         landmarks->face_landmarks[i].left_eye_center.y);
5503                                 mCropRegionMapper.toActiveArray(
5504                                         landmarks->face_landmarks[i].right_eye_center.x,
5505                                         landmarks->face_landmarks[i].right_eye_center.y);
5506                                 mCropRegionMapper.toActiveArray(
5507                                         landmarks->face_landmarks[i].mouth_center.x,
5508                                         landmarks->face_landmarks[i].mouth_center.y);
5509 
5510                                 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
5511                                 k+= 6;
5512                             }
5513                         }
5514 
5515                         camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
5516                         camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
5517                                 faceLandmarks, numFaces * 6U);
5518                    }
5519                 }
5520             }
5521         }
5522     }
5523 
5524     IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
5525         uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
5526         camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
5527     }
5528 
5529     IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
5530             CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
5531         uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
5532         camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
5533     }
5534 
5535     IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
5536             CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
5537         camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
5538                 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
5539     }
5540 
5541     IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
5542             CAM_INTF_META_LENS_SHADING_MAP, metadata) {
5543         size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
5544                 CAM_MAX_SHADING_MAP_HEIGHT);
5545         size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
5546                 CAM_MAX_SHADING_MAP_WIDTH);
5547         camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
5548                 lensShadingMap->lens_shading, 4U * map_width * map_height);
5549     }
5550 
5551     IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
5552         uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
5553         camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
5554     }
5555 
5556     IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
5557         //Populate CAM_INTF_META_TONEMAP_CURVES
5558         /* ch0 = G, ch 1 = B, ch 2 = R*/
5559         if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5560             LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5561                      tonemap->tonemap_points_cnt,
5562                     CAM_MAX_TONEMAP_CURVE_SIZE);
5563             tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5564         }
5565 
5566         camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
5567                         &tonemap->curves[0].tonemap_points[0][0],
5568                         tonemap->tonemap_points_cnt * 2);
5569 
5570         camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
5571                         &tonemap->curves[1].tonemap_points[0][0],
5572                         tonemap->tonemap_points_cnt * 2);
5573 
5574         camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
5575                         &tonemap->curves[2].tonemap_points[0][0],
5576                         tonemap->tonemap_points_cnt * 2);
5577     }
5578 
5579     IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
5580             CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
5581         camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
5582                 CC_GAINS_COUNT);
5583     }
5584 
5585     IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
5586             CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
5587         camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
5588                 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
5589                 CC_MATRIX_COLS * CC_MATRIX_ROWS);
5590     }
5591 
5592     IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
5593             CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
5594         if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5595             LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5596                      toneCurve->tonemap_points_cnt,
5597                     CAM_MAX_TONEMAP_CURVE_SIZE);
5598             toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5599         }
5600         camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
5601                 (float*)toneCurve->curve.tonemap_points,
5602                 toneCurve->tonemap_points_cnt * 2);
5603     }
5604 
5605     IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
5606             CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
5607         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
5608                 predColorCorrectionGains->gains, 4);
5609     }
5610 
5611     IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
5612             CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
5613         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
5614                 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
5615                 CC_MATRIX_ROWS * CC_MATRIX_COLS);
5616     }
5617 
5618     IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
5619         camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
5620     }
5621 
5622     IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
5623         uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
5624         camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
5625     }
5626 
5627     IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
5628         uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
5629         camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
5630     }
5631 
5632     IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
5633         int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
5634                 *effectMode);
5635         if (NAME_NOT_FOUND != val) {
5636             uint8_t fwk_effectMode = (uint8_t)val;
5637             camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
5638         }
5639     }
5640 
5641     IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
5642             CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
5643         int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
5644                 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
5645         if (NAME_NOT_FOUND != fwk_testPatternMode) {
5646             camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
5647         }
5648         int32_t fwk_testPatternData[4];
5649         fwk_testPatternData[0] = testPatternData->r;
5650         fwk_testPatternData[3] = testPatternData->b;
5651         switch (gCamCapability[mCameraId]->color_arrangement) {
5652         case CAM_FILTER_ARRANGEMENT_RGGB:
5653         case CAM_FILTER_ARRANGEMENT_GRBG:
5654             fwk_testPatternData[1] = testPatternData->gr;
5655             fwk_testPatternData[2] = testPatternData->gb;
5656             break;
5657         case CAM_FILTER_ARRANGEMENT_GBRG:
5658         case CAM_FILTER_ARRANGEMENT_BGGR:
5659             fwk_testPatternData[2] = testPatternData->gr;
5660             fwk_testPatternData[1] = testPatternData->gb;
5661             break;
5662         default:
5663             LOGE("color arrangement %d is not supported",
5664                 gCamCapability[mCameraId]->color_arrangement);
5665             break;
5666         }
5667         camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
5668     }
5669 
5670     IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
5671         camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
5672     }
5673 
5674     IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
5675         String8 str((const char *)gps_methods);
5676         camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
5677     }
5678 
5679     IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
5680         camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
5681     }
5682 
5683     IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
5684         camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
5685     }
5686 
5687     IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
5688         uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
5689         camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
5690     }
5691 
5692     IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
5693         uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
5694         camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
5695     }
5696 
5697     IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
5698         int32_t fwk_thumb_size[2];
5699         fwk_thumb_size[0] = thumb_size->width;
5700         fwk_thumb_size[1] = thumb_size->height;
5701         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
5702     }
5703 
5704     IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
5705         camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
5706                 privateData,
5707                 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
5708     }
5709 
5710     if (metadata->is_tuning_params_valid) {
5711         uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
5712         uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
5713         metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
5714 
5715 
5716         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
5717                 sizeof(uint32_t));
5718         data += sizeof(uint32_t);
5719 
5720         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
5721                 sizeof(uint32_t));
5722         LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5723         data += sizeof(uint32_t);
5724 
5725         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
5726                 sizeof(uint32_t));
5727         LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5728         data += sizeof(uint32_t);
5729 
5730         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
5731                 sizeof(uint32_t));
5732         LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5733         data += sizeof(uint32_t);
5734 
5735         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
5736                 sizeof(uint32_t));
5737         LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5738         data += sizeof(uint32_t);
5739 
5740         metadata->tuning_params.tuning_mod3_data_size = 0;
5741         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
5742                 sizeof(uint32_t));
5743         LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5744         data += sizeof(uint32_t);
5745 
5746         size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
5747                 TUNING_SENSOR_DATA_MAX);
5748         memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
5749                 count);
5750         data += count;
5751 
5752         count = MIN(metadata->tuning_params.tuning_vfe_data_size,
5753                 TUNING_VFE_DATA_MAX);
5754         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
5755                 count);
5756         data += count;
5757 
5758         count = MIN(metadata->tuning_params.tuning_cpp_data_size,
5759                 TUNING_CPP_DATA_MAX);
5760         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
5761                 count);
5762         data += count;
5763 
5764         count = MIN(metadata->tuning_params.tuning_cac_data_size,
5765                 TUNING_CAC_DATA_MAX);
5766         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
5767                 count);
5768         data += count;
5769 
5770         camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
5771                 (int32_t *)(void *)tuning_meta_data_blob,
5772                 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
5773     }
5774 
5775     IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
5776             CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
5777         camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
5778                 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
5779                 NEUTRAL_COL_POINTS);
5780     }
5781 
5782     IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
5783         uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
5784         camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
5785     }
5786 
5787     IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
5788         int32_t aeRegions[REGIONS_TUPLE_COUNT];
5789         // Adjust crop region from sensor output coordinate system to active
5790         // array coordinate system.
5791         mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
5792                 hAeRegions->rect.width, hAeRegions->rect.height);
5793 
5794         convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
5795         camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
5796                 REGIONS_TUPLE_COUNT);
5797         LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5798                  aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
5799                 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
5800                 hAeRegions->rect.height);
5801     }
5802 
5803     IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
5804         int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
5805         if (NAME_NOT_FOUND != val) {
5806             uint8_t fwkAfMode = (uint8_t)val;
5807             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
5808             LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
5809         } else {
5810             LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
5811                     val);
5812         }
5813     }
5814 
5815     IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
5816         uint8_t fwk_afState = (uint8_t) *afState;
5817         camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
5818         LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
5819     }
5820 
5821     IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
5822         camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
5823     }
5824 
5825     IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
5826         camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
5827     }
5828 
5829     IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
5830         uint8_t fwk_lensState = *lensState;
5831         camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
5832     }
5833 
5834     IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
5835         /*af regions*/
5836         int32_t afRegions[REGIONS_TUPLE_COUNT];
5837         // Adjust crop region from sensor output coordinate system to active
5838         // array coordinate system.
5839         mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
5840                 hAfRegions->rect.width, hAfRegions->rect.height);
5841 
5842         convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
5843         camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
5844                 REGIONS_TUPLE_COUNT);
5845         LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5846                  afRegions[0], afRegions[1], afRegions[2], afRegions[3],
5847                 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
5848                 hAfRegions->rect.height);
5849     }
5850 
5851     IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
5852         uint32_t ab_mode = *hal_ab_mode;
5853         if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
5854                 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
5855               ab_mode = CAM_ANTIBANDING_MODE_AUTO;
5856         }
5857         int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
5858                 ab_mode);
5859         if (NAME_NOT_FOUND != val) {
5860             uint8_t fwk_ab_mode = (uint8_t)val;
5861             camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
5862         }
5863     }
5864 
5865     IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5866         int val = lookupFwkName(SCENE_MODES_MAP,
5867                 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
5868         if (NAME_NOT_FOUND != val) {
5869             uint8_t fwkBestshotMode = (uint8_t)val;
5870             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
5871             LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
5872         } else {
5873             LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
5874         }
5875     }
5876 
5877     IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
5878          uint8_t fwk_mode = (uint8_t) *mode;
5879          camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
5880     }
5881 
5882     /* Constant metadata values to be update*/
5883     uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
5884     camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
5885 
5886     uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5887     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5888 
5889     int32_t hotPixelMap[2];
5890     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
5891 
5892     // CDS
5893     IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
5894         camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
5895     }
5896 
5897     // TNR
5898     IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
5899         uint8_t tnr_enable       = tnr->denoise_enable;
5900         int32_t tnr_process_type = (int32_t)tnr->process_plates;
5901 
5902         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
5903         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
5904     }
5905 
5906     // Reprocess crop data
5907     IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
5908         uint8_t cnt = crop_data->num_of_streams;
5909         if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
5910             // mm-qcamera-daemon only posts crop_data for streams
5911             // not linked to pproc. So no valid crop metadata is not
5912             // necessarily an error case.
5913             LOGD("No valid crop metadata entries");
5914         } else {
5915             uint32_t reproc_stream_id;
5916             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5917                 LOGD("No reprocessible stream found, ignore crop data");
5918             } else {
5919                 int rc = NO_ERROR;
5920                 Vector<int32_t> roi_map;
5921                 int32_t *crop = new int32_t[cnt*4];
5922                 if (NULL == crop) {
5923                    rc = NO_MEMORY;
5924                 }
5925                 if (NO_ERROR == rc) {
5926                     int32_t streams_found = 0;
5927                     for (size_t i = 0; i < cnt; i++) {
5928                         if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
5929                             if (pprocDone) {
5930                                 // HAL already does internal reprocessing,
5931                                 // either via reprocessing before JPEG encoding,
5932                                 // or offline postprocessing for pproc bypass case.
5933                                 crop[0] = 0;
5934                                 crop[1] = 0;
5935                                 crop[2] = mInputStreamInfo.dim.width;
5936                                 crop[3] = mInputStreamInfo.dim.height;
5937                             } else {
5938                                 crop[0] = crop_data->crop_info[i].crop.left;
5939                                 crop[1] = crop_data->crop_info[i].crop.top;
5940                                 crop[2] = crop_data->crop_info[i].crop.width;
5941                                 crop[3] = crop_data->crop_info[i].crop.height;
5942                             }
5943                             roi_map.add(crop_data->crop_info[i].roi_map.left);
5944                             roi_map.add(crop_data->crop_info[i].roi_map.top);
5945                             roi_map.add(crop_data->crop_info[i].roi_map.width);
5946                             roi_map.add(crop_data->crop_info[i].roi_map.height);
5947                             streams_found++;
5948                             LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
5949                                     crop[0], crop[1], crop[2], crop[3]);
5950                             LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
5951                                     crop_data->crop_info[i].roi_map.left,
5952                                     crop_data->crop_info[i].roi_map.top,
5953                                     crop_data->crop_info[i].roi_map.width,
5954                                     crop_data->crop_info[i].roi_map.height);
5955                             break;
5956 
5957                        }
5958                     }
5959                     camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
5960                             &streams_found, 1);
5961                     camMetadata.update(QCAMERA3_CROP_REPROCESS,
5962                             crop, (size_t)(streams_found * 4));
5963                     if (roi_map.array()) {
5964                         camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
5965                                 roi_map.array(), roi_map.size());
5966                     }
5967                }
5968                if (crop) {
5969                    delete [] crop;
5970                }
5971             }
5972         }
5973     }
5974 
5975     if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
5976         // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
5977         // so hardcoding the CAC result to OFF mode.
5978         uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
5979         camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
5980     } else {
5981         IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
5982             int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
5983                     *cacMode);
5984             if (NAME_NOT_FOUND != val) {
5985                 uint8_t resultCacMode = (uint8_t)val;
5986                 // check whether CAC result from CB is equal to Framework set CAC mode
5987                 // If not equal then set the CAC mode came in corresponding request
5988                 if (fwk_cacMode != resultCacMode) {
5989                     resultCacMode = fwk_cacMode;
5990                 }
5991                 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
5992                 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
5993             } else {
5994                 LOGE("Invalid CAC camera parameter: %d", *cacMode);
5995             }
5996         }
5997     }
5998 
5999     // Post blob of cam_cds_data through vendor tag.
6000     IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
6001         uint8_t cnt = cdsInfo->num_of_streams;
6002         cam_cds_data_t cdsDataOverride;
6003         memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
6004         cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
6005         cdsDataOverride.num_of_streams = 1;
6006         if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
6007             uint32_t reproc_stream_id;
6008             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6009                 LOGD("No reprocessible stream found, ignore cds data");
6010             } else {
6011                 for (size_t i = 0; i < cnt; i++) {
6012                     if (cdsInfo->cds_info[i].stream_id ==
6013                             reproc_stream_id) {
6014                         cdsDataOverride.cds_info[0].cds_enable =
6015                                 cdsInfo->cds_info[i].cds_enable;
6016                         break;
6017                     }
6018                 }
6019             }
6020         } else {
6021             LOGD("Invalid stream count %d in CDS_DATA", cnt);
6022         }
6023         camMetadata.update(QCAMERA3_CDS_INFO,
6024                 (uint8_t *)&cdsDataOverride,
6025                 sizeof(cam_cds_data_t));
6026     }
6027 
6028     // Ldaf calibration data
6029     if (!mLdafCalibExist) {
6030         IF_META_AVAILABLE(uint32_t, ldafCalib,
6031                 CAM_INTF_META_LDAF_EXIF, metadata) {
6032             mLdafCalibExist = true;
6033             mLdafCalib[0] = ldafCalib[0];
6034             mLdafCalib[1] = ldafCalib[1];
6035             LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
6036                     ldafCalib[0], ldafCalib[1]);
6037         }
6038     }
6039 
6040     // AF scene change
6041     IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
6042         camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
6043     }
6044 
6045     /* In batch mode, cache the first metadata in the batch */
6046     if (mBatchSize && firstMetadataInBatch) {
6047         mCachedMetadata.clear();
6048         mCachedMetadata = camMetadata;
6049     }
6050 
6051     resultMetadata = camMetadata.release();
6052     return resultMetadata;
6053 }
6054 
6055 /*===========================================================================
6056  * FUNCTION   : saveExifParams
6057  *
6058  * DESCRIPTION:
6059  *
6060  * PARAMETERS :
6061  *   @metadata : metadata information from callback
6062  *
6063  * RETURN     : none
6064  *
6065  *==========================================================================*/
saveExifParams(metadata_buffer_t * metadata)6066 void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
6067 {
6068     IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
6069             CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
6070         if (mExifParams.debug_params) {
6071             mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
6072             mExifParams.debug_params->ae_debug_params_valid = TRUE;
6073         }
6074     }
6075     IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
6076             CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
6077         if (mExifParams.debug_params) {
6078             mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
6079             mExifParams.debug_params->awb_debug_params_valid = TRUE;
6080         }
6081     }
6082     IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
6083             CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
6084         if (mExifParams.debug_params) {
6085             mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
6086             mExifParams.debug_params->af_debug_params_valid = TRUE;
6087         }
6088     }
6089     IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
6090             CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
6091         if (mExifParams.debug_params) {
6092             mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
6093             mExifParams.debug_params->asd_debug_params_valid = TRUE;
6094         }
6095     }
6096     IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
6097             CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
6098         if (mExifParams.debug_params) {
6099             mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
6100             mExifParams.debug_params->stats_debug_params_valid = TRUE;
6101         }
6102     }
6103     IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
6104             CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
6105         if (mExifParams.debug_params) {
6106             mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
6107             mExifParams.debug_params->bestats_debug_params_valid = TRUE;
6108         }
6109     }
6110     IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
6111             CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
6112         if (mExifParams.debug_params) {
6113             mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
6114             mExifParams.debug_params->bhist_debug_params_valid = TRUE;
6115         }
6116     }
6117     IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
6118             CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
6119         if (mExifParams.debug_params) {
6120             mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
6121             mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
6122         }
6123     }
6124 }
6125 
6126 /*===========================================================================
6127  * FUNCTION   : get3AExifParams
6128  *
6129  * DESCRIPTION:
6130  *
6131  * PARAMETERS : none
6132  *
6133  *
6134  * RETURN     : mm_jpeg_exif_params_t
6135  *
6136  *==========================================================================*/
get3AExifParams()6137 mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
6138 {
6139     return mExifParams;
6140 }
6141 
6142 /*===========================================================================
6143  * FUNCTION   : translateCbUrgentMetadataToResultMetadata
6144  *
6145  * DESCRIPTION:
6146  *
6147  * PARAMETERS :
6148  *   @metadata : metadata information from callback
6149  *
6150  * RETURN     : camera_metadata_t*
6151  *              metadata in a format specified by fwk
6152  *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    // Translate the "urgent" (partial result) subset of HAL metadata into
    // framework tags: AWB state/mode, AE state, precapture trigger and AF
    // trigger, plus a deduced ANDROID_CONTROL_AE_MODE. Only tags present in
    // this metadata buffer are emitted. Ownership of the returned
    // camera_metadata_t passes to the caller (CameraMetadata::release()).
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // HAL white-balance enum -> framework AWB mode via the static lookup table.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // ANDROID_CONTROL_AE_MODE is not a single HAL tag: it is deduced from
    // three inputs in priority order — red-eye reduction first, then
    // flash (auto/on), then plain AE on/off. Defaults keep the "not enough
    // info" branch reachable when none of the three tags is present.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }

    // Detach the raw buffer from camMetadata; caller takes ownership.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
6247 
6248 /*===========================================================================
6249  * FUNCTION   : dumpMetadataToFile
6250  *
6251  * DESCRIPTION: Dumps tuning metadata to file system
6252  *
6253  * PARAMETERS :
6254  *   @meta           : tuning metadata
6255  *   @dumpFrameCount : current dump frame count
6256  *   @enabled        : Enable mask
6257  *
6258  *==========================================================================*/
dumpMetadataToFile(tuning_params_t & meta,uint32_t & dumpFrameCount,bool enabled,const char * type,uint32_t frameNumber)6259 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
6260                                                    uint32_t &dumpFrameCount,
6261                                                    bool enabled,
6262                                                    const char *type,
6263                                                    uint32_t frameNumber)
6264 {
6265     //Some sanity checks
6266     if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
6267         LOGE("Tuning sensor data size bigger than expected %d: %d",
6268               meta.tuning_sensor_data_size,
6269               TUNING_SENSOR_DATA_MAX);
6270         return;
6271     }
6272 
6273     if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
6274         LOGE("Tuning VFE data size bigger than expected %d: %d",
6275               meta.tuning_vfe_data_size,
6276               TUNING_VFE_DATA_MAX);
6277         return;
6278     }
6279 
6280     if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
6281         LOGE("Tuning CPP data size bigger than expected %d: %d",
6282               meta.tuning_cpp_data_size,
6283               TUNING_CPP_DATA_MAX);
6284         return;
6285     }
6286 
6287     if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
6288         LOGE("Tuning CAC data size bigger than expected %d: %d",
6289               meta.tuning_cac_data_size,
6290               TUNING_CAC_DATA_MAX);
6291         return;
6292     }
6293     //
6294 
6295     if(enabled){
6296         char timeBuf[FILENAME_MAX];
6297         char buf[FILENAME_MAX];
6298         memset(buf, 0, sizeof(buf));
6299         memset(timeBuf, 0, sizeof(timeBuf));
6300         time_t current_time;
6301         struct tm * timeinfo;
6302         time (&current_time);
6303         timeinfo = localtime (&current_time);
6304         if (timeinfo != NULL) {
6305             /* Consistent naming for Jpeg+meta+raw: meta name */
6306             strftime (timeBuf, sizeof(timeBuf),
6307                     QCAMERA_DUMP_FRM_LOCATION"IMG_%Y%m%d_%H%M%S", timeinfo);
6308             /* Consistent naming for Jpeg+meta+raw: meta name end*/
6309         }
6310         String8 filePath(timeBuf);
6311          /* Consistent naming for Jpeg+meta+raw */
6312         snprintf(buf,
6313                 sizeof(buf),
6314                 "%dm_%s_%d.bin",
6315                 dumpFrameCount,
6316                 type,
6317                 frameNumber);
6318          /* Consistent naming for Jpeg+meta+raw end */
6319         filePath.append(buf);
6320         int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
6321         if (file_fd >= 0) {
6322             ssize_t written_len = 0;
6323             meta.tuning_data_version = TUNING_DATA_VERSION;
6324             void *data = (void *)((uint8_t *)&meta.tuning_data_version);
6325             written_len += write(file_fd, data, sizeof(uint32_t));
6326             data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
6327             LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6328             written_len += write(file_fd, data, sizeof(uint32_t));
6329             data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
6330             LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6331             written_len += write(file_fd, data, sizeof(uint32_t));
6332             data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
6333             LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6334             written_len += write(file_fd, data, sizeof(uint32_t));
6335             data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
6336             LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6337             written_len += write(file_fd, data, sizeof(uint32_t));
6338             meta.tuning_mod3_data_size = 0;
6339             data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
6340             LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6341             written_len += write(file_fd, data, sizeof(uint32_t));
6342             size_t total_size = meta.tuning_sensor_data_size;
6343             data = (void *)((uint8_t *)&meta.data);
6344             written_len += write(file_fd, data, total_size);
6345             total_size = meta.tuning_vfe_data_size;
6346             data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
6347             written_len += write(file_fd, data, total_size);
6348             total_size = meta.tuning_cpp_data_size;
6349             data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
6350             written_len += write(file_fd, data, total_size);
6351             total_size = meta.tuning_cac_data_size;
6352             data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
6353             written_len += write(file_fd, data, total_size);
6354             close(file_fd);
6355         }else {
6356             LOGE("fail to open file for metadata dumping");
6357         }
6358     }
6359 }
6360 
6361 /*===========================================================================
6362  * FUNCTION   : cleanAndSortStreamInfo
6363  *
6364  * DESCRIPTION: helper method to clean up invalid streams in stream_info,
6365  *              and sort them such that raw stream is at the end of the list
6366  *              This is a workaround for camera daemon constraint.
6367  *
6368  * PARAMETERS : None
6369  *
6370  *==========================================================================*/
cleanAndSortStreamInfo()6371 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
6372 {
6373     List<stream_info_t *> newStreamInfo;
6374 
6375     /*clean up invalid streams*/
6376     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
6377             it != mStreamInfo.end();) {
6378         if(((*it)->status) == INVALID){
6379             QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
6380             delete channel;
6381             free(*it);
6382             it = mStreamInfo.erase(it);
6383         } else {
6384             it++;
6385         }
6386     }
6387 
6388     // Move preview/video/callback/snapshot streams into newList
6389     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6390             it != mStreamInfo.end();) {
6391         if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
6392                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
6393                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
6394             newStreamInfo.push_back(*it);
6395             it = mStreamInfo.erase(it);
6396         } else
6397             it++;
6398     }
6399     // Move raw streams into newList
6400     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6401             it != mStreamInfo.end();) {
6402         newStreamInfo.push_back(*it);
6403         it = mStreamInfo.erase(it);
6404     }
6405 
6406     mStreamInfo = newStreamInfo;
6407 }
6408 
6409 /*===========================================================================
6410  * FUNCTION   : extractJpegMetadata
6411  *
6412  * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
6413  *              JPEG metadata is cached in HAL, and return as part of capture
6414  *              result when metadata is returned from camera daemon.
6415  *
6416  * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
6417  *              @request:      capture request
6418  *
6419  *==========================================================================*/
extractJpegMetadata(CameraMetadata & jpegMetadata,const camera3_capture_request_t * request)6420 void QCamera3HardwareInterface::extractJpegMetadata(
6421         CameraMetadata& jpegMetadata,
6422         const camera3_capture_request_t *request)
6423 {
6424     CameraMetadata frame_settings;
6425     frame_settings = request->settings;
6426 
6427     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
6428         jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
6429                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
6430                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
6431 
6432     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
6433         jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
6434                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
6435                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
6436 
6437     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
6438         jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
6439                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
6440                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
6441 
6442     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
6443         jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
6444                 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
6445                 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
6446 
6447     if (frame_settings.exists(ANDROID_JPEG_QUALITY))
6448         jpegMetadata.update(ANDROID_JPEG_QUALITY,
6449                 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
6450                 frame_settings.find(ANDROID_JPEG_QUALITY).count);
6451 
6452     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
6453         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
6454                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
6455                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
6456 
6457     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
6458         int32_t thumbnail_size[2];
6459         thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
6460         thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
6461         if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
6462             int32_t orientation =
6463                   frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
6464             if ((orientation == 90) || (orientation == 270)) {
6465                //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
6466                int32_t temp;
6467                temp = thumbnail_size[0];
6468                thumbnail_size[0] = thumbnail_size[1];
6469                thumbnail_size[1] = temp;
6470             }
6471          }
6472          jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
6473                 thumbnail_size,
6474                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
6475     }
6476 
6477 }
6478 
6479 /*===========================================================================
6480  * FUNCTION   : convertToRegions
6481  *
6482  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
6483  *
6484  * PARAMETERS :
6485  *   @rect   : cam_rect_t struct to convert
6486  *   @region : int32_t destination array
6487  *   @weight : if we are converting from cam_area_t, weight is valid
6488  *             else weight = -1
6489  *
6490  *==========================================================================*/
convertToRegions(cam_rect_t rect,int32_t * region,int weight)6491 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
6492         int32_t *region, int weight)
6493 {
6494     region[0] = rect.left;
6495     region[1] = rect.top;
6496     region[2] = rect.left + rect.width;
6497     region[3] = rect.top + rect.height;
6498     if (weight > -1) {
6499         region[4] = weight;
6500     }
6501 }
6502 
6503 /*===========================================================================
6504  * FUNCTION   : convertFromRegions
6505  *
6506  * DESCRIPTION: helper method to convert from array to cam_rect_t
6507  *
 * PARAMETERS :
 *   @roi      : cam_area_t destination to populate
 *   @settings : capture request settings to read the region tag from
 *   @tag      : metadata tag holding [x_min, y_min, x_max, y_max, weight]
 *
6513  *
6514  *==========================================================================*/
convertFromRegions(cam_area_t & roi,const camera_metadata_t * settings,uint32_t tag)6515 void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
6516         const camera_metadata_t *settings, uint32_t tag)
6517 {
6518     CameraMetadata frame_settings;
6519     frame_settings = settings;
6520     int32_t x_min = frame_settings.find(tag).data.i32[0];
6521     int32_t y_min = frame_settings.find(tag).data.i32[1];
6522     int32_t x_max = frame_settings.find(tag).data.i32[2];
6523     int32_t y_max = frame_settings.find(tag).data.i32[3];
6524     roi.weight = frame_settings.find(tag).data.i32[4];
6525     roi.rect.left = x_min;
6526     roi.rect.top = y_min;
6527     roi.rect.width = x_max - x_min;
6528     roi.rect.height = y_max - y_min;
6529 }
6530 
6531 /*===========================================================================
6532  * FUNCTION   : resetIfNeededROI
6533  *
6534  * DESCRIPTION: helper method to reset the roi if it is greater than scaler
6535  *              crop region
6536  *
6537  * PARAMETERS :
6538  *   @roi       : cam_area_t struct to resize
6539  *   @scalerCropRegion : cam_crop_region_t region to compare against
6540  *
6541  *
6542  *==========================================================================*/
resetIfNeededROI(cam_area_t * roi,const cam_crop_region_t * scalerCropRegion)6543 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
6544                                                  const cam_crop_region_t* scalerCropRegion)
6545 {
6546     int32_t roi_x_max = roi->rect.width + roi->rect.left;
6547     int32_t roi_y_max = roi->rect.height + roi->rect.top;
6548     int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
6549     int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
6550 
6551     /* According to spec weight = 0 is used to indicate roi needs to be disabled
6552      * without having this check the calculations below to validate if the roi
6553      * is inside scalar crop region will fail resulting in the roi not being
6554      * reset causing algorithm to continue to use stale roi window
6555      */
6556     if (roi->weight == 0) {
6557         return true;
6558     }
6559 
6560     if ((roi_x_max < scalerCropRegion->left) ||
6561         // right edge of roi window is left of scalar crop's left edge
6562         (roi_y_max < scalerCropRegion->top)  ||
6563         // bottom edge of roi window is above scalar crop's top edge
6564         (roi->rect.left > crop_x_max) ||
6565         // left edge of roi window is beyond(right) of scalar crop's right edge
6566         (roi->rect.top > crop_y_max)){
6567         // top edge of roi windo is above scalar crop's top edge
6568         return false;
6569     }
6570     if (roi->rect.left < scalerCropRegion->left) {
6571         roi->rect.left = scalerCropRegion->left;
6572     }
6573     if (roi->rect.top < scalerCropRegion->top) {
6574         roi->rect.top = scalerCropRegion->top;
6575     }
6576     if (roi_x_max > crop_x_max) {
6577         roi_x_max = crop_x_max;
6578     }
6579     if (roi_y_max > crop_y_max) {
6580         roi_y_max = crop_y_max;
6581     }
6582     roi->rect.width = roi_x_max - roi->rect.left;
6583     roi->rect.height = roi_y_max - roi->rect.top;
6584     return true;
6585 }
6586 
6587 /*===========================================================================
6588  * FUNCTION   : convertLandmarks
6589  *
6590  * DESCRIPTION: helper method to extract the landmarks from face detection info
6591  *
6592  * PARAMETERS :
6593  *   @landmark_data : input landmark data to be converted
6594  *   @landmarks : int32_t destination array
6595  *
6596  *
6597  *==========================================================================*/
convertLandmarks(cam_face_landmarks_info_t landmark_data,int32_t * landmarks)6598 void QCamera3HardwareInterface::convertLandmarks(
6599         cam_face_landmarks_info_t landmark_data,
6600         int32_t *landmarks)
6601 {
6602     landmarks[0] = (int32_t)landmark_data.left_eye_center.x;
6603     landmarks[1] = (int32_t)landmark_data.left_eye_center.y;
6604     landmarks[2] = (int32_t)landmark_data.right_eye_center.x;
6605     landmarks[3] = (int32_t)landmark_data.right_eye_center.y;
6606     landmarks[4] = (int32_t)landmark_data.mouth_center.x;
6607     landmarks[5] = (int32_t)landmark_data.mouth_center.y;
6608 }
6609 
6610 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
6611 /*===========================================================================
6612  * FUNCTION   : initCapabilities
6613  *
6614  * DESCRIPTION: initialize camera capabilities in static data struct
6615  *
6616  * PARAMETERS :
6617  *   @cameraId  : camera Id
6618  *
6619  * RETURN     : int32_t type of status
6620  *              NO_ERROR  -- success
6621  *              none-zero failure code
6622  *==========================================================================*/
initCapabilities(uint32_t cameraId)6623 int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
6624 {
6625     int rc = 0;
6626     mm_camera_vtbl_t *cameraHandle = NULL;
6627     QCamera3HeapMemory *capabilityHeap = NULL;
6628 
6629     rc = camera_open((uint8_t)cameraId, &cameraHandle);
6630     if (rc) {
6631         LOGE("camera_open failed. rc = %d", rc);
6632         goto open_failed;
6633     }
6634     if (!cameraHandle) {
6635         LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
6636         goto open_failed;
6637     }
6638 
6639     capabilityHeap = new QCamera3HeapMemory(1);
6640     if (capabilityHeap == NULL) {
6641         LOGE("creation of capabilityHeap failed");
6642         goto heap_creation_failed;
6643     }
6644     /* Allocate memory for capability buffer */
6645     rc = capabilityHeap->allocate(sizeof(cam_capability_t));
6646     if(rc != OK) {
6647         LOGE("No memory for cappability");
6648         goto allocate_failed;
6649     }
6650 
6651     /* Map memory for capability buffer */
6652     memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
6653     rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
6654                                 CAM_MAPPING_BUF_TYPE_CAPABILITY,
6655                                 capabilityHeap->getFd(0),
6656                                 sizeof(cam_capability_t),
6657                                 capabilityHeap->getPtr(0));
6658     if(rc < 0) {
6659         LOGE("failed to map capability buffer");
6660         goto map_failed;
6661     }
6662 
6663     /* Query Capability */
6664     rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
6665     if(rc < 0) {
6666         LOGE("failed to query capability");
6667         goto query_failed;
6668     }
6669     gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
6670     if (!gCamCapability[cameraId]) {
6671         LOGE("out of memory");
6672         goto query_failed;
6673     }
6674     memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
6675                                         sizeof(cam_capability_t));
6676 
6677     int index;
6678     for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
6679         cam_analysis_info_t *p_analysis_info =
6680                 &gCamCapability[cameraId]->analysis_info[index];
6681         p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
6682         p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
6683     }
6684     rc = 0;
6685 
6686 query_failed:
6687     cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
6688                             CAM_MAPPING_BUF_TYPE_CAPABILITY);
6689 map_failed:
6690     capabilityHeap->deallocate();
6691 allocate_failed:
6692     delete capabilityHeap;
6693 heap_creation_failed:
6694     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
6695     cameraHandle = NULL;
6696 open_failed:
6697     return rc;
6698 }
6699 
6700 /*==========================================================================
 * FUNCTION   : get3AVersion
6702  *
6703  * DESCRIPTION: get the Q3A S/W version
6704  *
6705  * PARAMETERS :
6706  *  @sw_version: Reference of Q3A structure which will hold version info upon
6707  *               return
6708  *
6709  * RETURN     : None
6710  *
6711  *==========================================================================*/
get3AVersion(cam_q3a_version_t & sw_version)6712 void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
6713 {
6714     if(gCamCapability[mCameraId])
6715         sw_version = gCamCapability[mCameraId]->q3a_version;
6716     else
6717         LOGE("Capability structure NULL!");
6718 }
6719 
6720 
6721 /*===========================================================================
6722  * FUNCTION   : initParameters
6723  *
6724  * DESCRIPTION: initialize camera parameters
6725  *
6726  * PARAMETERS :
6727  *
6728  * RETURN     : int32_t type of status
6729  *              NO_ERROR  -- success
6730  *              none-zero failure code
6731  *==========================================================================*/
initParameters()6732 int QCamera3HardwareInterface::initParameters()
6733 {
6734     int rc = 0;
6735 
6736     //Allocate Set Param Buffer
6737     mParamHeap = new QCamera3HeapMemory(1);
6738     rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
6739     if(rc != OK) {
6740         rc = NO_MEMORY;
6741         LOGE("Failed to allocate SETPARM Heap memory");
6742         delete mParamHeap;
6743         mParamHeap = NULL;
6744         return rc;
6745     }
6746 
6747     //Map memory for parameters buffer
6748     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
6749             CAM_MAPPING_BUF_TYPE_PARM_BUF,
6750             mParamHeap->getFd(0),
6751             sizeof(metadata_buffer_t),
6752             (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
6753     if(rc < 0) {
6754         LOGE("failed to map SETPARM buffer");
6755         rc = FAILED_TRANSACTION;
6756         mParamHeap->deallocate();
6757         delete mParamHeap;
6758         mParamHeap = NULL;
6759         return rc;
6760     }
6761 
6762     mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
6763 
6764     mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
6765     return rc;
6766 }
6767 
6768 /*===========================================================================
6769  * FUNCTION   : deinitParameters
6770  *
6771  * DESCRIPTION: de-initialize camera parameters
6772  *
6773  * PARAMETERS :
6774  *
6775  * RETURN     : NONE
6776  *==========================================================================*/
deinitParameters()6777 void QCamera3HardwareInterface::deinitParameters()
6778 {
6779     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
6780             CAM_MAPPING_BUF_TYPE_PARM_BUF);
6781 
6782     mParamHeap->deallocate();
6783     delete mParamHeap;
6784     mParamHeap = NULL;
6785 
6786     mParameters = NULL;
6787 
6788     free(mPrevParameters);
6789     mPrevParameters = NULL;
6790 }
6791 
6792 /*===========================================================================
6793  * FUNCTION   : calcMaxJpegSize
6794  *
6795  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
6796  *
6797  * PARAMETERS :
6798  *
6799  * RETURN     : max_jpeg_size
6800  *==========================================================================*/
calcMaxJpegSize(uint32_t camera_id)6801 size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
6802 {
6803     size_t max_jpeg_size = 0;
6804     size_t temp_width, temp_height;
6805     size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
6806             MAX_SIZES_CNT);
6807     for (size_t i = 0; i < count; i++) {
6808         temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
6809         temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
6810         if (temp_width * temp_height > max_jpeg_size ) {
6811             max_jpeg_size = temp_width * temp_height;
6812         }
6813     }
6814     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
6815     return max_jpeg_size;
6816 }
6817 
6818 /*===========================================================================
6819  * FUNCTION   : getMaxRawSize
6820  *
6821  * DESCRIPTION: Fetches maximum raw size supported by the cameraId
6822  *
6823  * PARAMETERS :
6824  *
6825  * RETURN     : Largest supported Raw Dimension
6826  *==========================================================================*/
getMaxRawSize(uint32_t camera_id)6827 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
6828 {
6829     int max_width = 0;
6830     cam_dimension_t maxRawSize;
6831 
6832     memset(&maxRawSize, 0, sizeof(cam_dimension_t));
6833     for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
6834         if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
6835             max_width = gCamCapability[camera_id]->raw_dim[i].width;
6836             maxRawSize = gCamCapability[camera_id]->raw_dim[i];
6837         }
6838     }
6839     return maxRawSize;
6840 }
6841 
6842 
6843 /*===========================================================================
6844  * FUNCTION   : calcMaxJpegDim
6845  *
6846  * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
6847  *
6848  * PARAMETERS :
6849  *
6850  * RETURN     : max_jpeg_dim
6851  *==========================================================================*/
calcMaxJpegDim()6852 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
6853 {
6854     cam_dimension_t max_jpeg_dim;
6855     cam_dimension_t curr_jpeg_dim;
6856     max_jpeg_dim.width = 0;
6857     max_jpeg_dim.height = 0;
6858     curr_jpeg_dim.width = 0;
6859     curr_jpeg_dim.height = 0;
6860     for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
6861         curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
6862         curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
6863         if (curr_jpeg_dim.width * curr_jpeg_dim.height >
6864             max_jpeg_dim.width * max_jpeg_dim.height ) {
6865             max_jpeg_dim.width = curr_jpeg_dim.width;
6866             max_jpeg_dim.height = curr_jpeg_dim.height;
6867         }
6868     }
6869     return max_jpeg_dim;
6870 }
6871 
6872 /*===========================================================================
6873  * FUNCTION   : addStreamConfig
6874  *
6875  * DESCRIPTION: adds the stream configuration to the array
6876  *
6877  * PARAMETERS :
6878  * @available_stream_configs : pointer to stream configuration array
6879  * @scalar_format            : scalar format
6880  * @dim                      : configuration dimension
6881  * @config_type              : input or output configuration type
6882  *
6883  * RETURN     : NONE
6884  *==========================================================================*/
addStreamConfig(Vector<int32_t> & available_stream_configs,int32_t scalar_format,const cam_dimension_t & dim,int32_t config_type)6885 void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
6886         int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
6887 {
6888     available_stream_configs.add(scalar_format);
6889     available_stream_configs.add(dim.width);
6890     available_stream_configs.add(dim.height);
6891     available_stream_configs.add(config_type);
6892 }
6893 
6894 /*===========================================================================
6895  * FUNCTION   : supportBurstCapture
6896  *
6897  * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
6898  *
6899  * PARAMETERS :
6900  *   @cameraId  : camera Id
6901  *
6902  * RETURN     : true if camera supports BURST_CAPTURE
6903  *              false otherwise
6904  *==========================================================================*/
supportBurstCapture(uint32_t cameraId)6905 bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
6906 {
6907     const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
6908     const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
6909     const int32_t highResWidth = 3264;
6910     const int32_t highResHeight = 2448;
6911 
6912     if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
6913         // Maximum resolution images cannot be captured at >= 10fps
6914         // -> not supporting BURST_CAPTURE
6915         return false;
6916     }
6917 
6918     if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
6919         // Maximum resolution images can be captured at >= 20fps
6920         // --> supporting BURST_CAPTURE
6921         return true;
6922     }
6923 
6924     // Find the smallest highRes resolution, or largest resolution if there is none
6925     size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
6926             MAX_SIZES_CNT);
6927     size_t highRes = 0;
6928     while ((highRes + 1 < totalCnt) &&
6929             (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
6930             gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
6931             highResWidth * highResHeight)) {
6932         highRes++;
6933     }
6934     if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
6935         return true;
6936     } else {
6937         return false;
6938     }
6939 }
6940 
6941 /*===========================================================================
6942  * FUNCTION   : initStaticMetadata
6943  *
6944  * DESCRIPTION: initialize the static metadata
6945  *
6946  * PARAMETERS :
6947  *   @cameraId  : camera Id
6948  *
6949  * RETURN     : int32_t type of status
6950  *              0  -- success
6951  *              non-zero failure code
6952  *==========================================================================*/
initStaticMetadata(uint32_t cameraId)6953 int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
6954 {
6955     int rc = 0;
6956     CameraMetadata staticInfo;
6957     size_t count = 0;
6958     bool limitedDevice = false;
6959     char prop[PROPERTY_VALUE_MAX];
6960     bool supportBurst = false;
6961 
6962     supportBurst = supportBurstCapture(cameraId);
6963 
6964     /* If sensor is YUV sensor (no raw support) or if per-frame control is not
6965      * guaranteed or if min fps of max resolution is less than 20 fps, its
6966      * advertised as limited device*/
6967     limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
6968             (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
6969             (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
6970             !supportBurst;
6971 
6972     uint8_t supportedHwLvl = limitedDevice ?
6973             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
6974             // LEVEL_3 - This device will support level 3.
6975             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
6976 
6977     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
6978             &supportedHwLvl, 1);
6979 
6980     bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
6981     /*HAL 3 only*/
6982     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6983                     &gCamCapability[cameraId]->min_focus_distance, 1);
6984 
6985     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
6986                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
6987 
6988     /*should be using focal lengths but sensor doesn't provide that info now*/
6989     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6990                       &gCamCapability[cameraId]->focal_length,
6991                       1);
6992 
6993     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6994             gCamCapability[cameraId]->apertures,
6995             MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
6996 
6997     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6998             gCamCapability[cameraId]->filter_densities,
6999             MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
7000 
7001 
7002     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7003             (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
7004             MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
7005 
7006     int32_t lens_shading_map_size[] = {
7007             MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
7008             MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
7009     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
7010                       lens_shading_map_size,
7011                       sizeof(lens_shading_map_size)/sizeof(int32_t));
7012 
7013     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
7014             gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
7015 
7016     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
7017             gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
7018 
7019     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7020             &gCamCapability[cameraId]->max_frame_duration, 1);
7021 
7022     camera_metadata_rational baseGainFactor = {
7023             gCamCapability[cameraId]->base_gain_factor.numerator,
7024             gCamCapability[cameraId]->base_gain_factor.denominator};
7025     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
7026                       &baseGainFactor, 1);
7027 
7028     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7029                      (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
7030 
7031     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
7032             gCamCapability[cameraId]->pixel_array_size.height};
7033     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7034                       pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
7035 
7036     int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
7037             gCamCapability[cameraId]->active_array_size.top,
7038             gCamCapability[cameraId]->active_array_size.width,
7039             gCamCapability[cameraId]->active_array_size.height};
7040     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7041             active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
7042 
7043     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
7044             &gCamCapability[cameraId]->white_level, 1);
7045 
7046     int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
7047     adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
7048             gCamCapability[cameraId]->color_arrangement);
7049     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
7050             adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
7051 
7052     bool hasBlackRegions = false;
7053     if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
7054         LOGW("black_region_count: %d is bounded to %d",
7055             gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
7056         gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
7057     }
7058     if (gCamCapability[cameraId]->optical_black_region_count != 0) {
7059         int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
7060         for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
7061             opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
7062         }
7063         staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
7064                 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
7065         hasBlackRegions = true;
7066     }
7067 
7068     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
7069             &gCamCapability[cameraId]->flash_charge_duration, 1);
7070 
7071     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
7072             &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
7073 
7074     uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
7075     staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7076             &timestampSource, 1);
7077 
7078     staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7079             &gCamCapability[cameraId]->histogram_size, 1);
7080 
7081     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7082             &gCamCapability[cameraId]->max_histogram_count, 1);
7083 
7084     int32_t sharpness_map_size[] = {
7085             gCamCapability[cameraId]->sharpness_map_size.width,
7086             gCamCapability[cameraId]->sharpness_map_size.height};
7087 
7088     staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
7089             sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
7090 
7091     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7092             &gCamCapability[cameraId]->max_sharpness_map_value, 1);
7093 
7094     int32_t scalar_formats[] = {
7095             ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
7096             ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
7097             ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
7098             ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
7099             HAL_PIXEL_FORMAT_RAW10,
7100             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
7101     size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
7102     staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
7103                       scalar_formats,
7104                       scalar_formats_count);
7105 
7106     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
7107     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
7108     makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
7109             count, MAX_SIZES_CNT, available_processed_sizes);
7110     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
7111             available_processed_sizes, count * 2);
7112 
7113     int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
7114     count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
7115     makeTable(gCamCapability[cameraId]->raw_dim,
7116             count, MAX_SIZES_CNT, available_raw_sizes);
7117     staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
7118             available_raw_sizes, count * 2);
7119 
7120     int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
7121     count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
7122     makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
7123             count, MAX_SIZES_CNT, available_fps_ranges);
7124     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7125             available_fps_ranges, count * 2);
7126 
7127     camera_metadata_rational exposureCompensationStep = {
7128             gCamCapability[cameraId]->exp_compensation_step.numerator,
7129             gCamCapability[cameraId]->exp_compensation_step.denominator};
7130     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
7131                       &exposureCompensationStep, 1);
7132 
7133     Vector<uint8_t> availableVstabModes;
7134     availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
7135     char eis_prop[PROPERTY_VALUE_MAX];
7136     memset(eis_prop, 0, sizeof(eis_prop));
7137     property_get("persist.camera.eis.enable", eis_prop, "0");
7138     uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
7139     if (facingBack && eis_prop_set) {
7140         availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
7141     }
7142     staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7143                       availableVstabModes.array(), availableVstabModes.size());
7144 
7145     /*HAL 1 and HAL 3 common*/
7146     float maxZoom = 4;
7147     staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7148             &maxZoom, 1);
7149 
7150     uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
7151     staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
7152 
7153     int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
7154     if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
7155         max3aRegions[2] = 0; /* AF not supported */
7156     staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
7157             max3aRegions, 3);
7158 
7159     /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
7160     memset(prop, 0, sizeof(prop));
7161     property_get("persist.camera.facedetect", prop, "1");
7162     uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
7163     LOGD("Support face detection mode: %d",
7164              supportedFaceDetectMode);
7165 
7166     int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
7167     Vector<uint8_t> availableFaceDetectModes;
7168     availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
7169     if (supportedFaceDetectMode == 1) {
7170         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
7171     } else if (supportedFaceDetectMode == 2) {
7172         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
7173     } else if (supportedFaceDetectMode == 3) {
7174         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
7175         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
7176     } else {
7177         maxFaces = 0;
7178     }
7179     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7180             availableFaceDetectModes.array(),
7181             availableFaceDetectModes.size());
7182     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
7183             (int32_t *)&maxFaces, 1);
7184 
7185     int32_t exposureCompensationRange[] = {
7186             gCamCapability[cameraId]->exposure_compensation_min,
7187             gCamCapability[cameraId]->exposure_compensation_max};
7188     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
7189             exposureCompensationRange,
7190             sizeof(exposureCompensationRange)/sizeof(int32_t));
7191 
7192     uint8_t lensFacing = (facingBack) ?
7193             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
7194     staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
7195 
7196     staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7197                       available_thumbnail_sizes,
7198                       sizeof(available_thumbnail_sizes)/sizeof(int32_t));
7199 
7200     /*all sizes will be clubbed into this tag*/
7201     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
7202     /*android.scaler.availableStreamConfigurations*/
7203     Vector<int32_t> available_stream_configs;
7204     cam_dimension_t active_array_dim;
7205     active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
7206     active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
7207     /* Add input/output stream configurations for each scalar formats*/
7208     for (size_t j = 0; j < scalar_formats_count; j++) {
7209         switch (scalar_formats[j]) {
7210         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
7211         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
7212         case HAL_PIXEL_FORMAT_RAW10:
7213             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7214                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7215                 addStreamConfig(available_stream_configs, scalar_formats[j],
7216                         gCamCapability[cameraId]->raw_dim[i],
7217                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7218             }
7219             break;
7220         case HAL_PIXEL_FORMAT_BLOB:
7221             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7222                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7223                 addStreamConfig(available_stream_configs, scalar_formats[j],
7224                         gCamCapability[cameraId]->picture_sizes_tbl[i],
7225                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7226             }
7227             break;
7228         case HAL_PIXEL_FORMAT_YCbCr_420_888:
7229         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
7230         default:
7231             cam_dimension_t largest_picture_size;
7232             memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
7233             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7234                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7235                 addStreamConfig(available_stream_configs, scalar_formats[j],
7236                         gCamCapability[cameraId]->picture_sizes_tbl[i],
7237                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7238                 /* Book keep largest */
7239                 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
7240                         >= largest_picture_size.width &&
7241                         gCamCapability[cameraId]->picture_sizes_tbl[i].height
7242                         >= largest_picture_size.height)
7243                     largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
7244             }
7245             /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
7246             if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
7247                     scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
7248                  addStreamConfig(available_stream_configs, scalar_formats[j],
7249                          largest_picture_size,
7250                          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
7251             }
7252             break;
7253         }
7254     }
7255 
7256     staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7257                       available_stream_configs.array(), available_stream_configs.size());
7258     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7259     staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7260 
7261     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7262     staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7263 
7264     /* android.scaler.availableMinFrameDurations */
7265     Vector<int64_t> available_min_durations;
7266     for (size_t j = 0; j < scalar_formats_count; j++) {
7267         switch (scalar_formats[j]) {
7268         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
7269         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
7270         case HAL_PIXEL_FORMAT_RAW10:
7271             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7272                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7273                 available_min_durations.add(scalar_formats[j]);
7274                 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7275                 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7276                 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
7277             }
7278             break;
7279         default:
7280             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7281                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7282                 available_min_durations.add(scalar_formats[j]);
7283                 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7284                 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7285                 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
7286             }
7287             break;
7288         }
7289     }
7290     staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
7291                       available_min_durations.array(), available_min_durations.size());
7292 
7293     Vector<int32_t> available_hfr_configs;
7294     for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
7295         int32_t fps = 0;
7296         switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
7297         case CAM_HFR_MODE_60FPS:
7298             fps = 60;
7299             break;
7300         case CAM_HFR_MODE_90FPS:
7301             fps = 90;
7302             break;
7303         case CAM_HFR_MODE_120FPS:
7304             fps = 120;
7305             break;
7306         case CAM_HFR_MODE_150FPS:
7307             fps = 150;
7308             break;
7309         case CAM_HFR_MODE_180FPS:
7310             fps = 180;
7311             break;
7312         case CAM_HFR_MODE_210FPS:
7313             fps = 210;
7314             break;
7315         case CAM_HFR_MODE_240FPS:
7316             fps = 240;
7317             break;
7318         case CAM_HFR_MODE_480FPS:
7319             fps = 480;
7320             break;
7321         case CAM_HFR_MODE_OFF:
7322         case CAM_HFR_MODE_MAX:
7323         default:
7324             break;
7325         }
7326 
7327         /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
7328         if (fps >= MIN_FPS_FOR_BATCH_MODE) {
7329             /* For each HFR frame rate, need to advertise one variable fps range
7330              * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
7331              * and [120, 120]. While camcorder preview alone is running [30, 120] is
7332              * set by the app. When video recording is started, [120, 120] is
7333              * set. This way sensor configuration does not change when recording
7334              * is started */
7335 
7336             /* (width, height, fps_min, fps_max, batch_size_max) */
7337             for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
7338                 j < MAX_SIZES_CNT; j++) {
7339                 available_hfr_configs.add(
7340                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
7341                 available_hfr_configs.add(
7342                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
7343                 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
7344                 available_hfr_configs.add(fps);
7345                 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
7346 
7347                 /* (width, height, fps_min, fps_max, batch_size_max) */
7348                 available_hfr_configs.add(
7349                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
7350                 available_hfr_configs.add(
7351                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
7352                 available_hfr_configs.add(fps);
7353                 available_hfr_configs.add(fps);
7354                 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
7355             }
7356        }
7357     }
7358     //Advertise HFR capability only if the property is set
7359     memset(prop, 0, sizeof(prop));
7360     property_get("persist.camera.hal3hfr.enable", prop, "1");
7361     uint8_t hfrEnable = (uint8_t)atoi(prop);
7362 
7363     if(hfrEnable && available_hfr_configs.array()) {
7364         staticInfo.update(
7365                 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
7366                 available_hfr_configs.array(), available_hfr_configs.size());
7367     }
7368 
7369     int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
7370     staticInfo.update(ANDROID_JPEG_MAX_SIZE,
7371                       &max_jpeg_size, 1);
7372 
7373     uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
7374     size_t size = 0;
7375     count = CAM_EFFECT_MODE_MAX;
7376     count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
7377     for (size_t i = 0; i < count; i++) {
7378         int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7379                 gCamCapability[cameraId]->supported_effects[i]);
7380         if (NAME_NOT_FOUND != val) {
7381             avail_effects[size] = (uint8_t)val;
7382             size++;
7383         }
7384     }
7385     staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
7386                       avail_effects,
7387                       size);
7388 
7389     uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
7390     uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
7391     size_t supported_scene_modes_cnt = 0;
7392     count = CAM_SCENE_MODE_MAX;
7393     count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
7394     for (size_t i = 0; i < count; i++) {
7395         if (gCamCapability[cameraId]->supported_scene_modes[i] !=
7396                 CAM_SCENE_MODE_OFF) {
7397             int val = lookupFwkName(SCENE_MODES_MAP,
7398                     METADATA_MAP_SIZE(SCENE_MODES_MAP),
7399                     gCamCapability[cameraId]->supported_scene_modes[i]);
7400             if (NAME_NOT_FOUND != val) {
7401                 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
7402                 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
7403                 supported_scene_modes_cnt++;
7404             }
7405         }
7406     }
7407     staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
7408                       avail_scene_modes,
7409                       supported_scene_modes_cnt);
7410 
7411     uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
7412     makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
7413                       supported_scene_modes_cnt,
7414                       CAM_SCENE_MODE_MAX,
7415                       scene_mode_overrides,
7416                       supported_indexes,
7417                       cameraId);
7418 
7419     if (supported_scene_modes_cnt == 0) {
7420         supported_scene_modes_cnt = 1;
7421         avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
7422     }
7423 
7424     staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
7425             scene_mode_overrides, supported_scene_modes_cnt * 3);
7426 
7427     uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
7428                                          ANDROID_CONTROL_MODE_AUTO,
7429                                          ANDROID_CONTROL_MODE_USE_SCENE_MODE};
7430     staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
7431             available_control_modes,
7432             3);
7433 
7434     uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
7435     size = 0;
7436     count = CAM_ANTIBANDING_MODE_MAX;
7437     count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
7438     for (size_t i = 0; i < count; i++) {
7439         int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
7440                 gCamCapability[cameraId]->supported_antibandings[i]);
7441         if (NAME_NOT_FOUND != val) {
7442             avail_antibanding_modes[size] = (uint8_t)val;
7443             size++;
7444         }
7445 
7446     }
7447     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
7448                       avail_antibanding_modes,
7449                       size);
7450 
    // Color-correction aberration modes: the candidate list is fixed; only
    // the advertised count (`size`) depends on the capability table.
    uint8_t avail_abberation_modes[] = {
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
    if (0 == count) {
        // If no aberration correction modes are available for a device,
        // advertise only the OFF mode.
        size = 1;
    } else {
        // If count is non-zero then at least one of FAST or HIGH_QUALITY is
        // supported, so advertise all 3 modes as per the Android M
        // requirement.
        size = 3;
    }
    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
            avail_abberation_modes,
            size);
7469 
    // Translate supported HAL focus modes to framework AF modes; unmapped
    // entries are dropped.
    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    count = CAM_FOCUS_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (NAME_NOT_FOUND != val) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    // Same translate-and-filter pattern for white balance modes.
    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    count = CAM_WB_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                gCamCapability[cameraId]->supported_white_balances[i]);
        if (NAME_NOT_FOUND != val) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);
7502 
    // Flash firing power levels are vendor values copied verbatim; there is
    // no framework translation table for them.
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    count = CAM_FLASH_FIRING_LEVEL_MAX;
    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
            count);
    for (size_t i = 0; i < count; i++) {
        available_flash_levels[i] =
                gCamCapability[cameraId]->supported_firing_levels[i];
    }
    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels, count);

    uint8_t flashAvailable;
    if (gCamCapability[cameraId]->flash_available)
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
    else
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    Vector<uint8_t> avail_ae_modes;
    count = CAM_AE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
    }
    // Truthiness works because ANDROID_FLASH_INFO_AVAILABLE_FALSE == 0; the
    // flash-assisted AE modes are only advertised on flash-capable devices.
    if (flashAvailable) {
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes.array(),
                      avail_ae_modes.size());
7535 
    // Overall sensitivity (ISO) range supported by the sensor.
    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    // Clockwise rotation needed to align sensor output with device natural
    // orientation.
    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    // NOTE(review): the metadata tag documents its entry order as
    // (raw, processed non-stalling, processed stalling), but the values here
    // are listed stalling-first. This is only harmless if
    // MAX_STALLING_STREAMS == MAX_RAW_STREAMS -- confirm against the macro
    // definitions in the header.
    int32_t max_output_streams[] = {
            MAX_STALLING_STREAMS,
            MAX_PROCESSED_STREAMS,
            MAX_RAW_STREAMS};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            max_output_streams,
            sizeof(max_output_streams)/sizeof(max_output_streams[0]));

    // Count of 0 deliberately advertises "no app-controllable LEDs".
    uint8_t avail_leds = 0;
    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
                      &avail_leds, 0);
7563 
    // Publish focus-distance calibration quality only when the HAL value maps
    // to a framework enum; otherwise the tag is omitted entirely.
    uint8_t focus_dist_calibrated;
    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
            gCamCapability[cameraId]->focus_dist_calibrated);
    if (NAME_NOT_FOUND != val) {
        focus_dist_calibrated = (uint8_t)val;
        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                     &focus_dist_calibrated, 1);
    }

    // Sensor test pattern modes, translated and filtered like the 3A lists.
    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
    size = 0;
    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
            MAX_TEST_PATTERN_CNT);
    for (size_t i = 0; i < count; i++) {
        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
        if (NAME_NOT_FOUND != testpatternMode) {
            avail_testpattern_modes[size] = testpatternMode;
            size++;
        }
    }
    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
                      avail_testpattern_modes,
                      size);

    // Worst-case number of frames a request can be in flight before its
    // result is returned.
    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
                      &max_pipeline_depth,
                      1);

    // Number of partial metadata results delivered per capture.
    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
                      &partial_result_count,
                       1);

    // Maximum extra stall (in frames) a reprocess capture may add.
    int32_t max_stall_duration = MAX_REPROCESS_STALL;
    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
7601 
    // Advertised device capabilities. The unconditional set is the FULL-level
    // baseline; BURST/CHSV/RAW are gated on runtime capability checks below.
    Vector<uint8_t> available_capabilities;
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
    if (supportBurst) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
    }
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    // NOTE(review): array() on an empty Vector is not guaranteed to be NULL,
    // so this reads like an emptiness check that may always pass -- the
    // intent is probably available_hfr_configs.size() > 0. Confirm against
    // the Vector implementation in use.
    if (hfrEnable && available_hfr_configs.array()) {
        available_capabilities.add(
                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
    }

    // Only non-YUV (bayer) sensors can produce RAW output.
    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            available_capabilities.array(),
            available_capabilities.size());

    // aeLockAvailable is set to true if capabilities has MANUAL_SENSOR or
    // BURST_CAPTURE. Assumption is that all bayer cameras support
    // MANUAL_SENSOR.
    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            &aeLockAvailable, 1);

    // awbLockAvailable is set to true if capabilities has
    // MANUAL_POST_PROCESSING or BURST_CAPTURE. Assumption is that all bayer
    // cameras support MANUAL_POST_PROCESSING.
    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            &awbLockAvailable, 1);

    // At most one reprocess input stream at a time.
    int32_t max_input_streams = 1;
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
                      &max_input_streams,
                      1);
7644 
    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
    // Two reprocess paths: IMPLEMENTATION_DEFINED -> {BLOB, YUV_420_888} and
    // YUV_420_888 -> {BLOB, YUV_420_888}.
    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
            HAL_PIXEL_FORMAT_YCbCr_420_888};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    // Settings applied by the very next frame (per-frame control).
    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
                      &max_latency,
                      1);

    // Post-RAW (ISP digital gain) sensitivity boost range.
    int32_t isp_sensitivity_range[2];
    isp_sensitivity_range[0] =
        gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
    isp_sensitivity_range[1] =
        gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
                      isp_sensitivity_range,
                      sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));

    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
            available_hot_pixel_modes,
            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));

    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
                                         ANDROID_SHADING_MODE_FAST,
                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
                      available_shading_modes,
                      3);

    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
                      available_lens_shading_map_modes,
                      2);
7685 
    // Edge enhancement modes, including the ZSL-specific mode.
    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
                                      ANDROID_EDGE_MODE_FAST,
                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
            available_edge_modes,
            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));

    // Noise reduction modes; MINIMAL and ZERO_SHUTTER_LAG are the
    // reprocessing-oriented additions.
    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            available_noise_red_modes,
            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));

    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
                                         ANDROID_TONEMAP_MODE_FAST,
                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
            available_tonemap_modes,
            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));

    // Hot-pixel map output is not supported; only OFF is advertised.
    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
            available_hot_pixel_map_modes,
            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
7714 
    // Reference illuminants are published only when the HAL value maps to a
    // framework enum. Note: `val` was declared earlier in this function and
    // is reused here.
    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant1);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
    }

    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant2);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
    }

    // Color conversion matrices for both reference illuminants. The
    // cast-through-void* reinterprets the capability tables as arrays of
    // camera_metadata_rational_t -- assumes the vendor rational layout
    // matches the metadata rational layout (numerator, denominator int32s).
    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix1,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix2,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform1,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform2,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform1,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform2,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7752 
    // Baseline set of controllable request keys; AF_REGIONS is appended below
    // only for cameras with more than one focus mode (i.e. a movable lens).
    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_JPEG_GPS_COORDINATES,
       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
       ANDROID_STATISTICS_FACE_DETECT_MODE,
       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
       /* DevCamDebug metadata request_keys_basic */
       DEVCAMDEBUG_META_ENABLE,
       /* DevCamDebug metadata end */
       };

    size_t request_keys_cnt =
            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
    Vector<int32_t> available_request_keys;
    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
    // More than one focus mode implies AF region control is meaningful.
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
            available_request_keys.array(), available_request_keys.size());
7798 
    // Baseline set of result keys; conditional keys (AF regions, RAW-only
    // tags, face-detect tags, dynamic black/white level) are appended below.
    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
       ANDROID_STATISTICS_FACE_SCORES,
       NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
       NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
       // DevCamDebug metadata result_keys_basic
       DEVCAMDEBUG_META_ENABLE,
       // DevCamDebug metadata result_keys AF
       DEVCAMDEBUG_AF_LENS_POSITION,
       DEVCAMDEBUG_AF_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_TOF_DISTANCE,
       DEVCAMDEBUG_AF_LUMA,
       DEVCAMDEBUG_AF_HAF_STATE,
       DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
       DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
       DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
       DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
       DEVCAMDEBUG_AF_MONITOR_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
       DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
       DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
       // DevCamDebug metadata result_keys AEC
       DEVCAMDEBUG_AEC_TARGET_LUMA,
       DEVCAMDEBUG_AEC_COMP_LUMA,
       DEVCAMDEBUG_AEC_AVG_LUMA,
       DEVCAMDEBUG_AEC_CUR_LUMA,
       DEVCAMDEBUG_AEC_LINECOUNT,
       DEVCAMDEBUG_AEC_REAL_GAIN,
       DEVCAMDEBUG_AEC_EXP_INDEX,
       DEVCAMDEBUG_AEC_LUX_IDX,
       // DevCamDebug metadata result_keys AWB
       DEVCAMDEBUG_AWB_R_GAIN,
       DEVCAMDEBUG_AWB_G_GAIN,
       DEVCAMDEBUG_AWB_B_GAIN,
       DEVCAMDEBUG_AWB_CCT,
       DEVCAMDEBUG_AWB_DECISION,
       /* DevCamDebug metadata end */
       };
    size_t result_keys_cnt =
            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);

    Vector<int32_t> available_result_keys;
    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }
    // RAW-only result tags for bayer sensors.
    if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
    }
    // Face-detect result keys depend on the advertised detect mode: mode 1
    // gives rectangles+scores; modes 2/3 additionally give ids+landmarks.
    if (supportedFaceDetectMode == 1) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
    } else if ((supportedFaceDetectMode == 2) ||
            (supportedFaceDetectMode == 3)) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
    }
    if (hasBlackRegions) {
        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
            available_result_keys.array(), available_result_keys.size());
7897 
    // Static characteristics keys this HAL publishes; the optical-black
    // regions key is appended conditionally below.
    int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
       ANDROID_SCALER_CROPPING_TYPE,
       ANDROID_SYNC_MAX_LATENCY,
       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
       ANDROID_LENS_FACING,
       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
       ANDROID_TONEMAP_MAX_CURVE_POINTS,
       ANDROID_CONTROL_AVAILABLE_MODES,
       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
       ANDROID_SHADING_AVAILABLE_MODES,
       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
       ANDROID_SENSOR_OPAQUE_RAW_SIZE };

    Vector<int32_t> available_characteristics_keys;
    available_characteristics_keys.appendArray(characteristics_keys_basic,
            sizeof(characteristics_keys_basic)/sizeof(int32_t));
    if (hasBlackRegions) {
        available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
                      available_characteristics_keys.array(),
                      available_characteristics_keys.size());
7965 
    /* available stall durations depend on the hw + sw and will be different
     * for different devices */
    /* have to add for raw after implementation */
    // Each table entry is 4 int64 values: format, width, height, duration.
    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);

    Vector<int64_t> available_stall_durations;
    for (uint32_t j = 0; j < stall_formats_count; j++) {
        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
            // BLOB (JPEG): one entry per supported picture size.
            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                available_stall_durations.add(stall_formats[j]);
                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
                available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
          }
        } else {
            // RAW16: one entry per supported raw dimension.
            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                available_stall_durations.add(stall_formats[j]);
                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
                available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
            }
        }
    }
    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
                      available_stall_durations.array(),
                      available_stall_durations.size());
7994 
    //QCAMERA3_OPAQUE_RAW
    // Derive the vendor raw format tag and the concrete bayer format from
    // the opaque raw packing (legacy QCOM vs MIPI) and sensor bit depth
    // (inferred from white_level). `fmt` is reused below to compute strides
    // and frame sizes. On an unknown packing the code logs and falls through
    // with the defaults initialized here (LEGACY tag, 10bpp QCOM format).
    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
    case LEGACY_RAW:
        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
        break;
    case MIPI_RAW:
        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
        break;
    default:
        LOGE("unknown opaque_raw_format %d",
                gCamCapability[cameraId]->opaque_raw_fmt);
        break;
    }
    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);

    // Per raw dimension: (width, height, stride) triples. The return value
    // of mm_stream_calc_offset_raw is ignored here (unlike the loop below),
    // so a failed calculation would publish an uninitialized stride --
    // NOTE(review): consider checking the return code as done for
    // ANDROID_SENSOR_OPAQUE_RAW_SIZE further down.
    Vector<int32_t> strides;
    for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
            gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
        cam_stream_buf_plane_info_t buf_planes;
        strides.add(gCamCapability[cameraId]->raw_dim[i].width);
        strides.add(gCamCapability[cameraId]->raw_dim[i].height);
        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
            &gCamCapability[cameraId]->padding_info, &buf_planes);
        strides.add(buf_planes.plane_info.mp[0].stride);
    }
    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
            strides.size());
8036 
8037     Vector<int32_t> opaque_size;
8038     for (size_t j = 0; j < scalar_formats_count; j++) {
8039         if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
8040             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8041                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8042                 cam_stream_buf_plane_info_t buf_planes;
8043 
8044                 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8045                          &gCamCapability[cameraId]->padding_info, &buf_planes);
8046 
8047                 if (rc == 0) {
8048                     opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
8049                     opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
8050                     opaque_size.add(buf_planes.plane_info.frame_len);
8051                 }else {
8052                     LOGE("raw frame calculation failed!");
8053                 }
8054             }
8055         }
8056     }
8057 
8058     if ((opaque_size.size() > 0) &&
8059             (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
8060         staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
8061     else
8062         LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
8063 
8064     gStaticMetadata[cameraId] = staticInfo.release();
8065     return rc;
8066 }
8067 
8068 /*===========================================================================
8069  * FUNCTION   : makeTable
8070  *
8071  * DESCRIPTION: make a table of sizes
8072  *
8073  * PARAMETERS :
8074  *
8075  *
8076  *==========================================================================*/
makeTable(cam_dimension_t * dimTable,size_t size,size_t max_size,int32_t * sizeTable)8077 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
8078         size_t max_size, int32_t *sizeTable)
8079 {
8080     size_t j = 0;
8081     if (size > max_size) {
8082        size = max_size;
8083     }
8084     for (size_t i = 0; i < size; i++) {
8085         sizeTable[j] = dimTable[i].width;
8086         sizeTable[j+1] = dimTable[i].height;
8087         j+=2;
8088     }
8089 }
8090 
8091 /*===========================================================================
8092  * FUNCTION   : makeFPSTable
8093  *
8094  * DESCRIPTION: make a table of fps ranges
8095  *
8096  * PARAMETERS :
8097  *
8098  *==========================================================================*/
makeFPSTable(cam_fps_range_t * fpsTable,size_t size,size_t max_size,int32_t * fpsRangesTable)8099 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
8100         size_t max_size, int32_t *fpsRangesTable)
8101 {
8102     size_t j = 0;
8103     if (size > max_size) {
8104        size = max_size;
8105     }
8106     for (size_t i = 0; i < size; i++) {
8107         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
8108         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
8109         j+=2;
8110     }
8111 }
8112 
8113 /*===========================================================================
8114  * FUNCTION   : makeOverridesList
8115  *
8116  * DESCRIPTION: make a list of scene mode overrides
8117  *
8118  * PARAMETERS :
8119  *
8120  *
8121  *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(
        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
      However we should send the fwk only the overrides for the scene modes
      supported by the framework*/
    // Each emitted override is a triple {ae_mode, awb_mode, af_mode}; j tracks
    // the write position into overridesList.
    size_t j = 0;
    if (size > max_size) {
       size = max_size;
    }
    // Only consult as many focus modes as the capability table actually holds.
    size_t focus_count = CAM_FOCUS_MODE_MAX;
    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
            focus_count);
    for (size_t i = 0; i < size; i++) {
        bool supt = false;
        // supported_indexes maps the i-th framework scene mode to its slot in
        // the daemon-provided overridesTable.
        size_t index = supported_indexes[i];
        // AE override: prefer auto-flash whenever the sensor has a flash unit.
        overridesList[j] = gCamCapability[camera_id]->flash_available ?
                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
        // AWB override: translate the HAL enum to the framework enum.
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                overridesTable[index].awb_mode);
        if (NAME_NOT_FOUND != val) {
            overridesList[j+1] = (uint8_t)val;
        }
        // NOTE(review): when the lookup fails, overridesList[j+1] is left
        // unwritten — presumably the caller pre-initializes the buffer; verify.
        // AF override: only advertise the daemon's focus mode if this camera
        // actually supports it; otherwise fall back to AF off.
        uint8_t focus_override = overridesTable[index].af_mode;
        for (size_t k = 0; k < focus_count; k++) {
           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
              supt = true;
              break;
           }
        }
        if (supt) {
            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                    focus_override);
            if (NAME_NOT_FOUND != val) {
                overridesList[j+2] = (uint8_t)val;
            }
        } else {
           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        j+=3;
    }
}
8166 
8167 /*===========================================================================
8168  * FUNCTION   : filterJpegSizes
8169  *
8170  * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
8171  *              could be downscaled to
8172  *
8173  * PARAMETERS :
8174  *
8175  * RETURN     : length of jpegSizes array
8176  *==========================================================================*/
8177 
filterJpegSizes(int32_t * jpegSizes,int32_t * processedSizes,size_t processedSizesCnt,size_t maxCount,cam_rect_t active_array_size,uint8_t downscale_factor)8178 size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
8179         size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
8180         uint8_t downscale_factor)
8181 {
8182     if (0 == downscale_factor) {
8183         downscale_factor = 1;
8184     }
8185 
8186     int32_t min_width = active_array_size.width / downscale_factor;
8187     int32_t min_height = active_array_size.height / downscale_factor;
8188     size_t jpegSizesCnt = 0;
8189     if (processedSizesCnt > maxCount) {
8190         processedSizesCnt = maxCount;
8191     }
8192     for (size_t i = 0; i < processedSizesCnt; i+=2) {
8193         if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
8194             jpegSizes[jpegSizesCnt] = processedSizes[i];
8195             jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
8196             jpegSizesCnt += 2;
8197         }
8198     }
8199     return jpegSizesCnt;
8200 }
8201 
8202 /*===========================================================================
8203  * FUNCTION   : computeNoiseModelEntryS
8204  *
8205  * DESCRIPTION: function to map a given sensitivity to the S noise
8206  *              model parameters in the DNG noise model.
8207  *
8208  * PARAMETERS : sens : the sensor sensitivity
8209  *
8210  ** RETURN    : S (sensor amplification) noise
8211  *
8212  *==========================================================================*/
computeNoiseModelEntryS(int32_t sens)8213 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
8214     double s = gCamCapability[mCameraId]->gradient_S * sens +
8215             gCamCapability[mCameraId]->offset_S;
8216     return ((s < 0.0) ? 0.0 : s);
8217 }
8218 
8219 /*===========================================================================
8220  * FUNCTION   : computeNoiseModelEntryO
8221  *
8222  * DESCRIPTION: function to map a given sensitivity to the O noise
8223  *              model parameters in the DNG noise model.
8224  *
8225  * PARAMETERS : sens : the sensor sensitivity
8226  *
8227  ** RETURN    : O (sensor readout) noise
8228  *
8229  *==========================================================================*/
computeNoiseModelEntryO(int32_t sens)8230 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
8231     int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
8232     double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
8233             1.0 : (1.0 * sens / max_analog_sens);
8234     double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
8235             gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
8236     return ((o < 0.0) ? 0.0 : o);
8237 }
8238 
8239 /*===========================================================================
8240  * FUNCTION   : getSensorSensitivity
8241  *
8242  * DESCRIPTION: convert iso_mode to an integer value
8243  *
8244  * PARAMETERS : iso_mode : the iso_mode supported by sensor
8245  *
8246  ** RETURN    : sensitivity supported by sensor
8247  *
8248  *==========================================================================*/
getSensorSensitivity(int32_t iso_mode)8249 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
8250 {
8251     int32_t sensitivity;
8252 
8253     switch (iso_mode) {
8254     case CAM_ISO_MODE_100:
8255         sensitivity = 100;
8256         break;
8257     case CAM_ISO_MODE_200:
8258         sensitivity = 200;
8259         break;
8260     case CAM_ISO_MODE_400:
8261         sensitivity = 400;
8262         break;
8263     case CAM_ISO_MODE_800:
8264         sensitivity = 800;
8265         break;
8266     case CAM_ISO_MODE_1600:
8267         sensitivity = 1600;
8268         break;
8269     default:
8270         sensitivity = -1;
8271         break;
8272     }
8273     return sensitivity;
8274 }
8275 
8276 /*===========================================================================
8277  * FUNCTION   : getCamInfo
8278  *
8279  * DESCRIPTION: query camera capabilities
8280  *
8281  * PARAMETERS :
8282  *   @cameraId  : camera Id
8283  *   @info      : camera info struct to be filled in with camera capabilities
8284  *
8285  * RETURN     : int type of status
8286  *              NO_ERROR  -- success
8287  *              none-zero failure code
8288  *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CALL();
    int rc = 0;

    // gCamLock serializes access to the process-wide capability and static
    // metadata caches, which are lazily populated on first query.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Translate the HAL mount position into the framework facing enum. An
    // unknown position is reported as an error but the remaining fields are
    // still filled in below.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        LOGE("Unknown position type for camera id:%d", cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // First find the highest max_fps across all advertised fps ranges.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    // Worst-case pixel throughput: all processed streams at full active-array
    // resolution at the peak frame rate, relative to the pipeline's bandwidth.
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
8356 
8357 /*===========================================================================
8358  * FUNCTION   : translateCapabilityToMetadata
8359  *
8360  * DESCRIPTION: translate the capability into camera_metadata_t
8361  *
8362  * PARAMETERS : type of the request
8363  *
8364  *
8365  * RETURN     : success: camera_metadata_t*
8366  *              failure: NULL
8367  *
8368  *==========================================================================*/
translateCapabilityToMetadata(int type)8369 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
8370 {
8371     if (mDefaultMetadata[type] != NULL) {
8372         return mDefaultMetadata[type];
8373     }
8374     //first time we are handling this request
8375     //fill up the metadata structure using the wrapper class
8376     CameraMetadata settings;
8377     //translate from cam_capability_t to camera_metadata_tag_t
8378     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
8379     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
8380     int32_t defaultRequestID = 0;
8381     settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
8382 
8383     /* OIS disable */
8384     char ois_prop[PROPERTY_VALUE_MAX];
8385     memset(ois_prop, 0, sizeof(ois_prop));
8386     property_get("persist.camera.ois.disable", ois_prop, "0");
8387     uint8_t ois_disable = (uint8_t)atoi(ois_prop);
8388 
8389     /* Force video to use OIS */
8390     char videoOisProp[PROPERTY_VALUE_MAX];
8391     memset(videoOisProp, 0, sizeof(videoOisProp));
8392     property_get("persist.camera.ois.video", videoOisProp, "1");
8393     uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
8394 
8395     // EIS enable/disable
8396     char eis_prop[PROPERTY_VALUE_MAX];
8397     memset(eis_prop, 0, sizeof(eis_prop));
8398     property_get("persist.camera.eis.enable", eis_prop, "0");
8399     const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
8400 
8401     // Hybrid AE enable/disable
8402     char hybrid_ae_prop[PROPERTY_VALUE_MAX];
8403     memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
8404     property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
8405     const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
8406 
8407     const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
8408     // This is a bit hacky. EIS is enabled only when the above setprop
8409     // is set to non-zero value and on back camera (for 2015 Nexus).
8410     // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
8411     // configureStream is called before this function. In other words,
8412     // we cannot guarantee the app will call configureStream before
8413     // calling createDefaultRequest.
8414     const bool eisEnabled = facingBack && eis_prop_set;
8415 
8416     uint8_t controlIntent = 0;
8417     uint8_t focusMode;
8418     uint8_t vsMode;
8419     uint8_t optStabMode;
8420     uint8_t cacMode;
8421     uint8_t edge_mode;
8422     uint8_t noise_red_mode;
8423     uint8_t tonemap_mode;
8424     bool highQualityModeEntryAvailable = FALSE;
8425     bool fastModeEntryAvailable = FALSE;
8426     vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
8427     optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8428     switch (type) {
8429       case CAMERA3_TEMPLATE_PREVIEW:
8430         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
8431         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8432         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8433         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8434         edge_mode = ANDROID_EDGE_MODE_FAST;
8435         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8436         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8437         break;
8438       case CAMERA3_TEMPLATE_STILL_CAPTURE:
8439         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
8440         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8441         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8442         edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
8443         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
8444         tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
8445         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8446         // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
8447         for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
8448             if (gCamCapability[mCameraId]->aberration_modes[i] ==
8449                     CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
8450                 highQualityModeEntryAvailable = TRUE;
8451             } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
8452                     CAM_COLOR_CORRECTION_ABERRATION_FAST) {
8453                 fastModeEntryAvailable = TRUE;
8454             }
8455         }
8456         if (highQualityModeEntryAvailable) {
8457             cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
8458         } else if (fastModeEntryAvailable) {
8459             cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8460         }
8461         break;
8462       case CAMERA3_TEMPLATE_VIDEO_RECORD:
8463         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
8464         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8465         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8466         if (eisEnabled) {
8467             vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
8468         }
8469         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8470         edge_mode = ANDROID_EDGE_MODE_FAST;
8471         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8472         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8473         if (forceVideoOis)
8474             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8475         break;
8476       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
8477         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
8478         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8479         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8480         if (eisEnabled) {
8481             vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
8482         }
8483         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8484         edge_mode = ANDROID_EDGE_MODE_FAST;
8485         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8486         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8487         if (forceVideoOis)
8488             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8489         break;
8490       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
8491         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
8492         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8493         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8494         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8495         edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
8496         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
8497         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8498         break;
8499       case CAMERA3_TEMPLATE_MANUAL:
8500         edge_mode = ANDROID_EDGE_MODE_FAST;
8501         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8502         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8503         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8504         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
8505         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8506         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8507         break;
8508       default:
8509         edge_mode = ANDROID_EDGE_MODE_FAST;
8510         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8511         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8512         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8513         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
8514         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8515         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8516         break;
8517     }
8518     settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
8519     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
8520     settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
8521     if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
8522         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8523     }
8524     settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
8525 
8526     if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8527             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
8528         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8529     else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8530             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
8531             || ois_disable)
8532         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8533     settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
8534 
8535     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
8536             &gCamCapability[mCameraId]->exposure_compensation_default, 1);
8537 
8538     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
8539     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
8540 
8541     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
8542     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
8543 
8544     static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
8545     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
8546 
8547     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
8548     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
8549 
8550     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
8551     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
8552 
8553     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
8554     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
8555 
8556     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
8557     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
8558 
8559     /*flash*/
8560     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
8561     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
8562 
8563     static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
8564     settings.update(ANDROID_FLASH_FIRING_POWER,
8565             &flashFiringLevel, 1);
8566 
8567     /* lens */
8568     float default_aperture = gCamCapability[mCameraId]->apertures[0];
8569     settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
8570 
8571     if (gCamCapability[mCameraId]->filter_densities_count) {
8572         float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
8573         settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
8574                         gCamCapability[mCameraId]->filter_densities_count);
8575     }
8576 
8577     float default_focal_length = gCamCapability[mCameraId]->focal_length;
8578     settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
8579 
8580     if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
8581         float default_focus_distance = 0;
8582         settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
8583     }
8584 
8585     static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
8586     settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
8587 
8588     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8589     settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8590 
8591     static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
8592     settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
8593 
8594     /* face detection (default to OFF) */
8595     static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
8596     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
8597 
8598     static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
8599     settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
8600 
8601     static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
8602     settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
8603 
8604     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8605     settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8606 
8607     static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8608     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
8609 
8610     static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8611     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
8612 
8613     /* Exposure time(Update the Min Exposure Time)*/
8614     int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
8615     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
8616 
8617     /* frame duration */
8618     static const int64_t default_frame_duration = NSEC_PER_33MSEC;
8619     settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
8620 
8621     /* sensitivity */
8622     static const int32_t default_sensitivity = 100;
8623     settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
8624     static const int32_t default_isp_sensitivity =
8625             gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
8626     settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
8627 
8628     /*edge mode*/
8629     settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
8630 
8631     /*noise reduction mode*/
8632     settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
8633 
8634     /*color correction mode*/
8635     static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
8636     settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
8637 
8638     /*transform matrix mode*/
8639     settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
8640 
8641     int32_t scaler_crop_region[4];
8642     scaler_crop_region[0] = 0;
8643     scaler_crop_region[1] = 0;
8644     scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
8645     scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
8646     settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
8647 
8648     static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
8649     settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
8650 
8651     /*focus distance*/
8652     float focus_distance = 0.0;
8653     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
8654 
8655     /*target fps range: use maximum range for picture, and maximum fixed range for video*/
8656     /* Restrict default preview template to max 30 fps */
8657     float max_range = 0.0;
8658     float max_fixed_fps = 0.0;
8659     int32_t fps_range[2] = {0, 0};
8660     for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
8661             i++) {
8662         if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
8663                 TEMPLATE_MAX_PREVIEW_FPS) {
8664             continue;
8665         }
8666         float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
8667             gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8668         if (type == CAMERA3_TEMPLATE_PREVIEW ||
8669                 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
8670                 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
8671             if (range > max_range) {
8672                 fps_range[0] =
8673                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8674                 fps_range[1] =
8675                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8676                 max_range = range;
8677             }
8678         } else {
8679             if (range < 0.01 && max_fixed_fps <
8680                     gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
8681                 fps_range[0] =
8682                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8683                 fps_range[1] =
8684                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8685                 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8686             }
8687         }
8688     }
8689     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
8690 
8691     /*precapture trigger*/
8692     uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
8693     settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
8694 
8695     /*af trigger*/
8696     uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
8697     settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
8698 
8699     /* ae & af regions */
8700     int32_t active_region[] = {
8701             gCamCapability[mCameraId]->active_array_size.left,
8702             gCamCapability[mCameraId]->active_array_size.top,
8703             gCamCapability[mCameraId]->active_array_size.left +
8704                     gCamCapability[mCameraId]->active_array_size.width,
8705             gCamCapability[mCameraId]->active_array_size.top +
8706                     gCamCapability[mCameraId]->active_array_size.height,
8707             0};
8708     settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
8709             sizeof(active_region) / sizeof(active_region[0]));
8710     settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
8711             sizeof(active_region) / sizeof(active_region[0]));
8712 
8713     /* black level lock */
8714     uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8715     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
8716 
8717     /* lens shading map mode */
8718     uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8719     if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
8720         shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
8721     }
8722     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
8723 
8724     //special defaults for manual template
8725     if (type == CAMERA3_TEMPLATE_MANUAL) {
8726         static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
8727         settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
8728 
8729         static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
8730         settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
8731 
8732         static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
8733         settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
8734 
8735         static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
8736         settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
8737 
8738         static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
8739         settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
8740 
8741         static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
8742         settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
8743     }
8744 
8745 
8746     /* TNR
8747      * We'll use this location to determine which modes TNR will be set.
8748      * We will enable TNR to be on if either of the Preview/Video stream requires TNR
8749      * This is not to be confused with linking on a per stream basis that decision
8750      * is still on per-session basis and will be handled as part of config stream
8751      */
8752     uint8_t tnr_enable = 0;
8753 
8754     if (m_bTnrPreview || m_bTnrVideo) {
8755 
8756         switch (type) {
8757             case CAMERA3_TEMPLATE_VIDEO_RECORD:
8758                     tnr_enable = 1;
8759                     break;
8760 
8761             default:
8762                     tnr_enable = 0;
8763                     break;
8764         }
8765 
8766         int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
8767         settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
8768         settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
8769 
8770         LOGD("TNR:%d with process plate %d for template:%d",
8771                              tnr_enable, tnr_process_type, type);
8772     }
8773 
8774     //Update Link tags to default
8775     int32_t sync_type = CAM_TYPE_STANDALONE;
8776     settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
8777 
8778     int32_t is_main = 0; //this doesn't matter as app should overwrite
8779     settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
8780 
8781     settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
8782 
8783     /* CDS default */
8784     char prop[PROPERTY_VALUE_MAX];
8785     memset(prop, 0, sizeof(prop));
8786     property_get("persist.camera.CDS", prop, "Auto");
8787     cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
8788     cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
8789     if (CAM_CDS_MODE_MAX == cds_mode) {
8790         cds_mode = CAM_CDS_MODE_AUTO;
8791     }
8792 
8793     /* Disabling CDS in templates which have TNR enabled*/
8794     if (tnr_enable)
8795         cds_mode = CAM_CDS_MODE_OFF;
8796 
8797     int32_t mode = cds_mode;
8798     settings.update(QCAMERA3_CDS_MODE, &mode, 1);
8799 
8800     /* hybrid ae */
8801     settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
8802 
8803     mDefaultMetadata[type] = settings.release();
8804 
8805     return mDefaultMetadata[type];
8806 }
8807 
/*===========================================================================
 * FUNCTION   : setFrameParameters
 *
 * DESCRIPTION: set parameters per frame as requested in the metadata from
 *              framework
 *
 * PARAMETERS :
 *   @request   : request that needs to be serviced
 *   @streamsArray : Stream ID of all the requested streams
 *   @blob_request: Whether this request is a blob request or not
 *   @snapshotStreamId : ID of the snapshot stream
 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
 *==========================================================================*/
setFrameParameters(camera3_capture_request_t * request,cam_stream_ID_t streamsArray,int blob_request,uint32_t snapshotStreamId)8822 int QCamera3HardwareInterface::setFrameParameters(
8823                     camera3_capture_request_t *request,
8824                     cam_stream_ID_t streamsArray,
8825                     int blob_request,
8826                     uint32_t snapshotStreamId)
8827 {
8828     /*translate from camera_metadata_t type to parm_type_t*/
8829     int rc = 0;
8830     int32_t hal_version = CAM_HAL_V3;
8831 
8832     clear_metadata_buffer(mParameters);
8833     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
8834         LOGE("Failed to set hal version in the parameters");
8835         return BAD_VALUE;
8836     }
8837 
8838     /*we need to update the frame number in the parameters*/
8839     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
8840             request->frame_number)) {
8841         LOGE("Failed to set the frame number in the parameters");
8842         return BAD_VALUE;
8843     }
8844 
8845     /* Update stream id of all the requested buffers */
8846     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
8847         LOGE("Failed to set stream type mask in the parameters");
8848         return BAD_VALUE;
8849     }
8850 
8851     if (mUpdateDebugLevel) {
8852         uint32_t dummyDebugLevel = 0;
8853         /* The value of dummyDebugLevel is irrelavent. On
8854          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
8855         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
8856                 dummyDebugLevel)) {
8857             LOGE("Failed to set UPDATE_DEBUG_LEVEL");
8858             return BAD_VALUE;
8859         }
8860         mUpdateDebugLevel = false;
8861     }
8862 
8863     if(request->settings != NULL){
8864         rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
8865         if (blob_request)
8866             memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
8867     }
8868 
8869     return rc;
8870 }
8871 
/*===========================================================================
 * FUNCTION   : setReprocParameters
 *
 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
 *              return it.
 *
 * PARAMETERS :
 *   @request      : request that needs to be serviced
 *   @reprocParam  : HAL metadata buffer to be filled with the translated
 *                   reprocess parameters
 *   @snapshotStreamId : ID of the snapshot stream
 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE or error from translateToHalMetadata
 *==========================================================================*/
setReprocParameters(camera3_capture_request_t * request,metadata_buffer_t * reprocParam,uint32_t snapshotStreamId)8884 int32_t QCamera3HardwareInterface::setReprocParameters(
8885         camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
8886         uint32_t snapshotStreamId)
8887 {
8888     /*translate from camera_metadata_t type to parm_type_t*/
8889     int rc = 0;
8890 
8891     if (NULL == request->settings){
8892         LOGE("Reprocess settings cannot be NULL");
8893         return BAD_VALUE;
8894     }
8895 
8896     if (NULL == reprocParam) {
8897         LOGE("Invalid reprocessing metadata buffer");
8898         return BAD_VALUE;
8899     }
8900     clear_metadata_buffer(reprocParam);
8901 
8902     /*we need to update the frame number in the parameters*/
8903     if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
8904             request->frame_number)) {
8905         LOGE("Failed to set the frame number in the parameters");
8906         return BAD_VALUE;
8907     }
8908 
8909     rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
8910     if (rc < 0) {
8911         LOGE("Failed to translate reproc request");
8912         return rc;
8913     }
8914 
8915     CameraMetadata frame_settings;
8916     frame_settings = request->settings;
8917     if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
8918             frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
8919         int32_t *crop_count =
8920                 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
8921         int32_t *crop_data =
8922                 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
8923         int32_t *roi_map =
8924                 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
8925         if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
8926             cam_crop_data_t crop_meta;
8927             memset(&crop_meta, 0, sizeof(cam_crop_data_t));
8928             crop_meta.num_of_streams = 1;
8929             crop_meta.crop_info[0].crop.left   = crop_data[0];
8930             crop_meta.crop_info[0].crop.top    = crop_data[1];
8931             crop_meta.crop_info[0].crop.width  = crop_data[2];
8932             crop_meta.crop_info[0].crop.height = crop_data[3];
8933 
8934             crop_meta.crop_info[0].roi_map.left =
8935                     roi_map[0];
8936             crop_meta.crop_info[0].roi_map.top =
8937                     roi_map[1];
8938             crop_meta.crop_info[0].roi_map.width =
8939                     roi_map[2];
8940             crop_meta.crop_info[0].roi_map.height =
8941                     roi_map[3];
8942 
8943             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
8944                 rc = BAD_VALUE;
8945             }
8946             LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
8947                     request->input_buffer->stream,
8948                     crop_meta.crop_info[0].crop.left,
8949                     crop_meta.crop_info[0].crop.top,
8950                     crop_meta.crop_info[0].crop.width,
8951                     crop_meta.crop_info[0].crop.height);
8952             LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
8953                     request->input_buffer->stream,
8954                     crop_meta.crop_info[0].roi_map.left,
8955                     crop_meta.crop_info[0].roi_map.top,
8956                     crop_meta.crop_info[0].roi_map.width,
8957                     crop_meta.crop_info[0].roi_map.height);
8958             } else {
8959                 LOGE("Invalid reprocess crop count %d!", *crop_count);
8960             }
8961     } else {
8962         LOGE("No crop data from matching output stream");
8963     }
8964 
8965     /* These settings are not needed for regular requests so handle them specially for
8966        reprocess requests; information needed for EXIF tags */
8967     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8968         int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8969                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8970         if (NAME_NOT_FOUND != val) {
8971             uint32_t flashMode = (uint32_t)val;
8972             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
8973                 rc = BAD_VALUE;
8974             }
8975         } else {
8976             LOGE("Could not map fwk flash mode %d to correct hal flash mode",
8977                     frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8978         }
8979     } else {
8980         LOGH("No flash mode in reprocess settings");
8981     }
8982 
8983     if (frame_settings.exists(ANDROID_FLASH_STATE)) {
8984         int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
8985         if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
8986             rc = BAD_VALUE;
8987         }
8988     } else {
8989         LOGH("No flash state in reprocess settings");
8990     }
8991 
8992     return rc;
8993 }
8994 
/*===========================================================================
 * FUNCTION   : saveRequestSettings
 *
 * DESCRIPTION: Add any settings that might have changed to the request settings
 *              and save the settings to be applied on the frame
 *
 * PARAMETERS :
 *   @jpegMetadata : the extracted and/or modified jpeg metadata
 *   @request      : request with initial settings
 *
 * RETURN     :
 * camera_metadata_t* : pointer to the saved request settings; caller takes
 *                      ownership of the returned buffer
 *==========================================================================*/
saveRequestSettings(const CameraMetadata & jpegMetadata,camera3_capture_request_t * request)9008 camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
9009         const CameraMetadata &jpegMetadata,
9010         camera3_capture_request_t *request)
9011 {
9012     camera_metadata_t *resultMetadata;
9013     CameraMetadata camMetadata;
9014     camMetadata = request->settings;
9015 
9016     if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9017         int32_t thumbnail_size[2];
9018         thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9019         thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9020         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
9021                 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
9022     }
9023 
9024     resultMetadata = camMetadata.release();
9025     return resultMetadata;
9026 }
9027 
/*===========================================================================
 * FUNCTION   : setHalFpsRange
 *
 * DESCRIPTION: set FPS range parameter
 *
 * PARAMETERS :
 *   @settings    : Metadata from framework
 *   @hal_metadata: Metadata buffer
 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // Caller (translateToHalMetadata) has already verified that
    // ANDROID_CONTROL_AE_TARGET_FPS_RANGE exists before calling here.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Start with the video range mirroring the AE target range; it is
    // overridden below for constrained high-speed mode.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // HFR: run the sensor at a fixed rate — pin both min fps values to
        // the (video) max fps, per the table above.
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the requested max fps to the corresponding HFR mode enum.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch enough frames per request to bring preview down to
                // PREVIEW_FPS_FOR_HFR, capped at the max supported batch.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
9135 
/*===========================================================================
 * FUNCTION   : translateToHalMetadata
 *
 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
 *
 * PARAMETERS :
 *   @request      : request sent from framework
 *   @hal_metadata : HAL metadata buffer to be filled in
 *   @snapshotStreamId : ID of the snapshot stream
 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
 *==========================================================================*/
translateToHalMetadata(const camera3_capture_request_t * request,metadata_buffer_t * hal_metadata,uint32_t snapshotStreamId)9149 int QCamera3HardwareInterface::translateToHalMetadata
9150                                   (const camera3_capture_request_t *request,
9151                                    metadata_buffer_t *hal_metadata,
9152                                    uint32_t snapshotStreamId)
9153 {
9154     int rc = 0;
9155     CameraMetadata frame_settings;
9156     frame_settings = request->settings;
9157 
    /* Do not change the order of the following list unless you know what you are
     * doing.
     * The order is laid out in such a way that parameters in the front of the table
     * may be used to override the parameters later in the table. Examples are:
     * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
     * 4. Any mode should precede its corresponding settings
     */
9167     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
9168         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
9169         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
9170             rc = BAD_VALUE;
9171         }
9172         rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
9173         if (rc != NO_ERROR) {
9174             LOGE("extractSceneMode failed");
9175         }
9176     }
9177 
9178     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9179         uint8_t fwk_aeMode =
9180             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9181         uint8_t aeMode;
9182         int32_t redeye;
9183 
9184         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
9185             aeMode = CAM_AE_MODE_OFF;
9186         } else {
9187             aeMode = CAM_AE_MODE_ON;
9188         }
9189         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
9190             redeye = 1;
9191         } else {
9192             redeye = 0;
9193         }
9194 
9195         int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
9196                 fwk_aeMode);
9197         if (NAME_NOT_FOUND != val) {
9198             int32_t flashMode = (int32_t)val;
9199             ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
9200         }
9201 
9202         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
9203         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
9204             rc = BAD_VALUE;
9205         }
9206     }
9207 
9208     if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
9209         uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
9210         int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9211                 fwk_whiteLevel);
9212         if (NAME_NOT_FOUND != val) {
9213             uint8_t whiteLevel = (uint8_t)val;
9214             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
9215                 rc = BAD_VALUE;
9216             }
9217         }
9218     }
9219 
9220     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
9221         uint8_t fwk_cacMode =
9222                 frame_settings.find(
9223                         ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
9224         int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
9225                 fwk_cacMode);
9226         if (NAME_NOT_FOUND != val) {
9227             cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
9228             bool entryAvailable = FALSE;
9229             // Check whether Frameworks set CAC mode is supported in device or not
9230             for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9231                 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
9232                     entryAvailable = TRUE;
9233                     break;
9234                 }
9235             }
9236             LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
9237             // If entry not found then set the device supported mode instead of frameworks mode i.e,
9238             // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
9239             // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
9240             if (entryAvailable == FALSE) {
9241                 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9242                     cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9243                 } else {
9244                     if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9245                         // High is not supported and so set the FAST as spec say's underlying
9246                         // device implementation can be the same for both modes.
9247                         cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
9248                     } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9249                         // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
9250                         // in order to avoid the fps drop due to high quality
9251                         cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9252                     } else {
9253                         cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9254                     }
9255                 }
9256             }
9257             LOGD("Final cacMode is %d", cacMode);
9258             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
9259                 rc = BAD_VALUE;
9260             }
9261         } else {
9262             LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
9263         }
9264     }
9265 
9266     if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
9267         uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
9268         int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9269                 fwk_focusMode);
9270         if (NAME_NOT_FOUND != val) {
9271             uint8_t focusMode = (uint8_t)val;
9272             LOGD("set focus mode %d", focusMode);
9273             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
9274                 rc = BAD_VALUE;
9275             }
9276         }
9277     }
9278 
9279     if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
9280         float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
9281         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
9282                 focalDistance)) {
9283             rc = BAD_VALUE;
9284         }
9285     }
9286 
9287     if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
9288         uint8_t fwk_antibandingMode =
9289                 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
9290         int val = lookupHalName(ANTIBANDING_MODES_MAP,
9291                 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
9292         if (NAME_NOT_FOUND != val) {
9293             uint32_t hal_antibandingMode = (uint32_t)val;
9294             if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
9295                 if (m60HzZone) {
9296                     hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
9297                 } else {
9298                     hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
9299                 }
9300             }
9301             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
9302                     hal_antibandingMode)) {
9303                 rc = BAD_VALUE;
9304             }
9305         }
9306     }
9307 
9308     if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
9309         int32_t expCompensation = frame_settings.find(
9310                 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
9311         if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
9312             expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
9313         if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
9314             expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
9315         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
9316                 expCompensation)) {
9317             rc = BAD_VALUE;
9318         }
9319     }
9320 
9321     if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
9322         uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
9323         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
9324             rc = BAD_VALUE;
9325         }
9326     }
9327     if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
9328         rc = setHalFpsRange(frame_settings, hal_metadata);
9329         if (rc != NO_ERROR) {
9330             LOGE("setHalFpsRange failed");
9331         }
9332     }
9333 
9334     if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
9335         uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
9336         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
9337             rc = BAD_VALUE;
9338         }
9339     }
9340 
9341     if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
9342         uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
9343         int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9344                 fwk_effectMode);
9345         if (NAME_NOT_FOUND != val) {
9346             uint8_t effectMode = (uint8_t)val;
9347             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
9348                 rc = BAD_VALUE;
9349             }
9350         }
9351     }
9352 
9353     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
9354         uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
9355         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
9356                 colorCorrectMode)) {
9357             rc = BAD_VALUE;
9358         }
9359     }
9360 
9361     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
9362         cam_color_correct_gains_t colorCorrectGains;
9363         for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
9364             colorCorrectGains.gains[i] =
9365                     frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
9366         }
9367         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
9368                 colorCorrectGains)) {
9369             rc = BAD_VALUE;
9370         }
9371     }
9372 
9373     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
9374         cam_color_correct_matrix_t colorCorrectTransform;
9375         cam_rational_type_t transform_elem;
9376         size_t num = 0;
9377         for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
9378            for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
9379               transform_elem.numerator =
9380                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
9381               transform_elem.denominator =
9382                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
9383               colorCorrectTransform.transform_matrix[i][j] = transform_elem;
9384               num++;
9385            }
9386         }
9387         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
9388                 colorCorrectTransform)) {
9389             rc = BAD_VALUE;
9390         }
9391     }
9392 
9393     cam_trigger_t aecTrigger;
9394     aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
9395     aecTrigger.trigger_id = -1;
9396     if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
9397         frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
9398         aecTrigger.trigger =
9399             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
9400         aecTrigger.trigger_id =
9401             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
9402         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
9403                 aecTrigger)) {
9404             rc = BAD_VALUE;
9405         }
9406         LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
9407                 aecTrigger.trigger, aecTrigger.trigger_id);
9408     }
9409 
9410     /*af_trigger must come with a trigger id*/
9411     if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
9412         frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
9413         cam_trigger_t af_trigger;
9414         af_trigger.trigger =
9415             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
9416         af_trigger.trigger_id =
9417             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
9418         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
9419             rc = BAD_VALUE;
9420         }
9421         LOGD("AfTrigger: %d AfTriggerID: %d",
9422                 af_trigger.trigger, af_trigger.trigger_id);
9423     }
9424 
9425     if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
9426         int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
9427         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
9428             rc = BAD_VALUE;
9429         }
9430     }
9431     if (frame_settings.exists(ANDROID_EDGE_MODE)) {
9432         cam_edge_application_t edge_application;
9433         edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
9434         if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
9435             edge_application.sharpness = 0;
9436         } else {
9437             edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
9438         }
9439         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
9440             rc = BAD_VALUE;
9441         }
9442     }
9443 
9444     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
9445         int32_t respectFlashMode = 1;
9446         if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9447             uint8_t fwk_aeMode =
9448                 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9449             if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
9450                 respectFlashMode = 0;
9451                 LOGH("AE Mode controls flash, ignore android.flash.mode");
9452             }
9453         }
9454         if (respectFlashMode) {
9455             int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
9456                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9457             LOGH("flash mode after mapping %d", val);
9458             // To check: CAM_INTF_META_FLASH_MODE usage
9459             if (NAME_NOT_FOUND != val) {
9460                 uint8_t flashMode = (uint8_t)val;
9461                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
9462                     rc = BAD_VALUE;
9463                 }
9464             }
9465         }
9466     }
9467 
9468     if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
9469         uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
9470         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
9471             rc = BAD_VALUE;
9472         }
9473     }
9474 
9475     if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
9476         int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
9477         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
9478                 flashFiringTime)) {
9479             rc = BAD_VALUE;
9480         }
9481     }
9482 
9483     if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
9484         uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
9485         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
9486                 hotPixelMode)) {
9487             rc = BAD_VALUE;
9488         }
9489     }
9490 
9491     if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
9492         float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
9493         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
9494                 lensAperture)) {
9495             rc = BAD_VALUE;
9496         }
9497     }
9498 
9499     if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
9500         float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
9501         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
9502                 filterDensity)) {
9503             rc = BAD_VALUE;
9504         }
9505     }
9506 
9507     if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
9508         float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
9509         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
9510                 focalLength)) {
9511             rc = BAD_VALUE;
9512         }
9513     }
9514 
9515     if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
9516         uint8_t optStabMode =
9517                 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
9518         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
9519                 optStabMode)) {
9520             rc = BAD_VALUE;
9521         }
9522     }
9523 
9524     if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
9525         uint8_t videoStabMode =
9526                 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
9527         LOGD("videoStabMode from APP = %d", videoStabMode);
9528         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
9529                 videoStabMode)) {
9530             rc = BAD_VALUE;
9531         }
9532     }
9533 
9534 
9535     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
9536         uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
9537         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
9538                 noiseRedMode)) {
9539             rc = BAD_VALUE;
9540         }
9541     }
9542 
9543     if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
9544         float reprocessEffectiveExposureFactor =
9545             frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
9546         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
9547                 reprocessEffectiveExposureFactor)) {
9548             rc = BAD_VALUE;
9549         }
9550     }
9551 
9552     cam_crop_region_t scalerCropRegion;
9553     bool scalerCropSet = false;
9554     if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
9555         scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
9556         scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
9557         scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
9558         scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
9559 
9560         // Map coordinate system from active array to sensor output.
9561         mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
9562                 scalerCropRegion.width, scalerCropRegion.height);
9563 
9564         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
9565                 scalerCropRegion)) {
9566             rc = BAD_VALUE;
9567         }
9568         scalerCropSet = true;
9569     }
9570 
9571     if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
9572         int64_t sensorExpTime =
9573                 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
9574         LOGD("setting sensorExpTime %lld", sensorExpTime);
9575         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
9576                 sensorExpTime)) {
9577             rc = BAD_VALUE;
9578         }
9579     }
9580 
9581     if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
9582         int64_t sensorFrameDuration =
9583                 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
9584         int64_t minFrameDuration = getMinFrameDuration(request);
9585         sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
9586         if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
9587             sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
9588         LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
9589         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
9590                 sensorFrameDuration)) {
9591             rc = BAD_VALUE;
9592         }
9593     }
9594 
9595     if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
9596         int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
9597         if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
9598                 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
9599         if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
9600                 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
9601         LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
9602         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
9603                 sensorSensitivity)) {
9604             rc = BAD_VALUE;
9605         }
9606     }
9607 
9608     if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
9609         int32_t ispSensitivity =
9610             frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
9611         if (ispSensitivity <
9612             gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
9613                 ispSensitivity =
9614                     gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9615                 LOGD("clamp ispSensitivity to %d", ispSensitivity);
9616         }
9617         if (ispSensitivity >
9618             gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
9619                 ispSensitivity =
9620                     gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
9621                 LOGD("clamp ispSensitivity to %d", ispSensitivity);
9622         }
9623         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
9624                 ispSensitivity)) {
9625             rc = BAD_VALUE;
9626         }
9627     }
9628 
9629     if (frame_settings.exists(ANDROID_SHADING_MODE)) {
9630         uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
9631         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
9632             rc = BAD_VALUE;
9633         }
9634     }
9635 
9636     if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
9637         uint8_t fwk_facedetectMode =
9638                 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
9639 
9640         int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
9641                 fwk_facedetectMode);
9642 
9643         if (NAME_NOT_FOUND != val) {
9644             uint8_t facedetectMode = (uint8_t)val;
9645             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
9646                     facedetectMode)) {
9647                 rc = BAD_VALUE;
9648             }
9649         }
9650     }
9651 
9652     if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
9653         uint8_t histogramMode =
9654                 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
9655         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
9656                 histogramMode)) {
9657             rc = BAD_VALUE;
9658         }
9659     }
9660 
9661     if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
9662         uint8_t sharpnessMapMode =
9663                 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
9664         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
9665                 sharpnessMapMode)) {
9666             rc = BAD_VALUE;
9667         }
9668     }
9669 
9670     if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
9671         uint8_t tonemapMode =
9672                 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
9673         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
9674             rc = BAD_VALUE;
9675         }
9676     }
9677     /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
9678     /*All tonemap channels will have the same number of points*/
9679     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
9680         frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
9681         frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
9682         cam_rgb_tonemap_curves tonemapCurves;
9683         tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
9684         if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
9685             LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
9686                      tonemapCurves.tonemap_points_cnt,
9687                     CAM_MAX_TONEMAP_CURVE_SIZE);
9688             tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
9689         }
9690 
9691         /* ch0 = G*/
9692         size_t point = 0;
9693         cam_tonemap_curve_t tonemapCurveGreen;
9694         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9695             for (size_t j = 0; j < 2; j++) {
9696                tonemapCurveGreen.tonemap_points[i][j] =
9697                   frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
9698                point++;
9699             }
9700         }
9701         tonemapCurves.curves[0] = tonemapCurveGreen;
9702 
9703         /* ch 1 = B */
9704         point = 0;
9705         cam_tonemap_curve_t tonemapCurveBlue;
9706         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9707             for (size_t j = 0; j < 2; j++) {
9708                tonemapCurveBlue.tonemap_points[i][j] =
9709                   frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
9710                point++;
9711             }
9712         }
9713         tonemapCurves.curves[1] = tonemapCurveBlue;
9714 
9715         /* ch 2 = R */
9716         point = 0;
9717         cam_tonemap_curve_t tonemapCurveRed;
9718         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9719             for (size_t j = 0; j < 2; j++) {
9720                tonemapCurveRed.tonemap_points[i][j] =
9721                   frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
9722                point++;
9723             }
9724         }
9725         tonemapCurves.curves[2] = tonemapCurveRed;
9726 
9727         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
9728                 tonemapCurves)) {
9729             rc = BAD_VALUE;
9730         }
9731     }
9732 
9733     if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
9734         uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
9735         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
9736                 captureIntent)) {
9737             rc = BAD_VALUE;
9738         }
9739     }
9740 
9741     if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
9742         uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
9743         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
9744                 blackLevelLock)) {
9745             rc = BAD_VALUE;
9746         }
9747     }
9748 
9749     if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
9750         uint8_t lensShadingMapMode =
9751                 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
9752         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
9753                 lensShadingMapMode)) {
9754             rc = BAD_VALUE;
9755         }
9756     }
9757 
9758     if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
9759         cam_area_t roi;
9760         bool reset = true;
9761         convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
9762 
9763         // Map coordinate system from active array to sensor output.
9764         mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9765                 roi.rect.height);
9766 
9767         if (scalerCropSet) {
9768             reset = resetIfNeededROI(&roi, &scalerCropRegion);
9769         }
9770         if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
9771             rc = BAD_VALUE;
9772         }
9773     }
9774 
9775     if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
9776         cam_area_t roi;
9777         bool reset = true;
9778         convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
9779 
9780         // Map coordinate system from active array to sensor output.
9781         mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9782                 roi.rect.height);
9783 
9784         if (scalerCropSet) {
9785             reset = resetIfNeededROI(&roi, &scalerCropRegion);
9786         }
9787         if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
9788             rc = BAD_VALUE;
9789         }
9790     }
9791 
9792     // CDS for non-HFR non-video mode
9793     if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
9794             !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
9795         int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
9796         if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
9797             LOGE("Invalid CDS mode %d!", *fwk_cds);
9798         } else {
9799             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9800                     CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
9801                 rc = BAD_VALUE;
9802             }
9803         }
9804     }
9805 
9806     // TNR
9807     if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
9808         frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
9809         uint8_t b_TnrRequested = 0;
9810         cam_denoise_param_t tnr;
9811         tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
9812         tnr.process_plates =
9813             (cam_denoise_process_type_t)frame_settings.find(
9814             QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
9815         b_TnrRequested = tnr.denoise_enable;
9816         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
9817             rc = BAD_VALUE;
9818         }
9819     }
9820 
9821     if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
9822         int32_t fwk_testPatternMode =
9823                 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
9824         int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
9825                 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
9826 
9827         if (NAME_NOT_FOUND != testPatternMode) {
9828             cam_test_pattern_data_t testPatternData;
9829             memset(&testPatternData, 0, sizeof(testPatternData));
9830             testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
9831             if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
9832                     frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
9833                 int32_t *fwk_testPatternData =
9834                         frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
9835                 testPatternData.r = fwk_testPatternData[0];
9836                 testPatternData.b = fwk_testPatternData[3];
9837                 switch (gCamCapability[mCameraId]->color_arrangement) {
9838                     case CAM_FILTER_ARRANGEMENT_RGGB:
9839                     case CAM_FILTER_ARRANGEMENT_GRBG:
9840                         testPatternData.gr = fwk_testPatternData[1];
9841                         testPatternData.gb = fwk_testPatternData[2];
9842                         break;
9843                     case CAM_FILTER_ARRANGEMENT_GBRG:
9844                     case CAM_FILTER_ARRANGEMENT_BGGR:
9845                         testPatternData.gr = fwk_testPatternData[2];
9846                         testPatternData.gb = fwk_testPatternData[1];
9847                         break;
9848                     default:
9849                         LOGE("color arrangement %d is not supported",
9850                                 gCamCapability[mCameraId]->color_arrangement);
9851                         break;
9852                 }
9853             }
9854             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
9855                     testPatternData)) {
9856                 rc = BAD_VALUE;
9857             }
9858         } else {
9859             LOGE("Invalid framework sensor test pattern mode %d",
9860                     fwk_testPatternMode);
9861         }
9862     }
9863 
9864     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
9865         size_t count = 0;
9866         camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
9867         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
9868                 gps_coords.data.d, gps_coords.count, count);
9869         if (gps_coords.count != count) {
9870             rc = BAD_VALUE;
9871         }
9872     }
9873 
9874     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
9875         char gps_methods[GPS_PROCESSING_METHOD_SIZE];
9876         size_t count = 0;
9877         const char *gps_methods_src = (const char *)
9878                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
9879         memset(gps_methods, '\0', sizeof(gps_methods));
9880         strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
9881         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
9882                 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
9883         if (GPS_PROCESSING_METHOD_SIZE != count) {
9884             rc = BAD_VALUE;
9885         }
9886     }
9887 
9888     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
9889         int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
9890         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
9891                 gps_timestamp)) {
9892             rc = BAD_VALUE;
9893         }
9894     }
9895 
9896     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
9897         int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
9898         cam_rotation_info_t rotation_info;
9899         if (orientation == 0) {
9900            rotation_info.rotation = ROTATE_0;
9901         } else if (orientation == 90) {
9902            rotation_info.rotation = ROTATE_90;
9903         } else if (orientation == 180) {
9904            rotation_info.rotation = ROTATE_180;
9905         } else if (orientation == 270) {
9906            rotation_info.rotation = ROTATE_270;
9907         }
9908         rotation_info.device_rotation = ROTATE_0;
9909         rotation_info.streamId = snapshotStreamId;
9910         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
9911         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
9912             rc = BAD_VALUE;
9913         }
9914     }
9915 
9916     if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
9917         uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
9918         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
9919             rc = BAD_VALUE;
9920         }
9921     }
9922 
9923     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
9924         uint32_t thumb_quality = (uint32_t)
9925                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
9926         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
9927                 thumb_quality)) {
9928             rc = BAD_VALUE;
9929         }
9930     }
9931 
9932     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9933         cam_dimension_t dim;
9934         dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9935         dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9936         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
9937             rc = BAD_VALUE;
9938         }
9939     }
9940 
9941     // Internal metadata
9942     if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
9943         size_t count = 0;
9944         camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
9945         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
9946                 privatedata.data.i32, privatedata.count, count);
9947         if (privatedata.count != count) {
9948             rc = BAD_VALUE;
9949         }
9950     }
9951 
9952     // EV step
9953     if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
9954             gCamCapability[mCameraId]->exp_compensation_step)) {
9955         rc = BAD_VALUE;
9956     }
9957 
9958     // CDS info
9959     if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
9960         cam_cds_data_t *cdsData = (cam_cds_data_t *)
9961                 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
9962 
9963         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9964                 CAM_INTF_META_CDS_DATA, *cdsData)) {
9965             rc = BAD_VALUE;
9966         }
9967     }
9968 
9969     // Hybrid AE
9970     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
9971         uint8_t *hybrid_ae = (uint8_t *)
9972                 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
9973 
9974         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9975                 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
9976             rc = BAD_VALUE;
9977         }
9978     }
9979 
9980     return rc;
9981 }
9982 
9983 /*===========================================================================
9984  * FUNCTION   : captureResultCb
9985  *
9986  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
9987  *
9988  * PARAMETERS :
9989  *   @frame  : frame information from mm-camera-interface
9990  *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
9991  *   @userdata: userdata
9992  *
9993  * RETURN     : NONE
9994  *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer,void * userdata)9995 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
9996                 camera3_stream_buffer_t *buffer,
9997                 uint32_t frame_number, bool isInputBuffer, void *userdata)
9998 {
9999     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
10000     if (hw == NULL) {
10001         LOGE("Invalid hw %p", hw);
10002         return;
10003     }
10004 
10005     hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
10006     return;
10007 }
10008 
10009 /*===========================================================================
10010  * FUNCTION   : setBufferErrorStatus
10011  *
10012  * DESCRIPTION: Callback handler for channels to report any buffer errors
10013  *
10014  * PARAMETERS :
10015  *   @ch     : Channel on which buffer error is reported from
10016  *   @frame_number  : frame number on which buffer error is reported on
10017  *   @buffer_status : buffer error status
10018  *   @userdata: userdata
10019  *
10020  * RETURN     : NONE
10021  *==========================================================================*/
setBufferErrorStatus(QCamera3Channel * ch,uint32_t frame_number,camera3_buffer_status_t err,void * userdata)10022 void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
10023                 uint32_t frame_number, camera3_buffer_status_t err,
10024                 void *userdata)
10025 {
10026     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
10027     if (hw == NULL) {
10028         LOGE("Invalid hw %p", hw);
10029         return;
10030     }
10031 
10032     hw->setBufferErrorStatus(ch, frame_number, err);
10033     return;
10034 }
10035 
setBufferErrorStatus(QCamera3Channel * ch,uint32_t frameNumber,camera3_buffer_status_t err)10036 void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
10037                 uint32_t frameNumber, camera3_buffer_status_t err)
10038 {
10039     LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
10040     pthread_mutex_lock(&mMutex);
10041 
10042     for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
10043         if (req.frame_number != frameNumber)
10044             continue;
10045         for (auto& k : req.mPendingBufferList) {
10046             if(k.stream->priv == ch) {
10047                 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
10048             }
10049         }
10050     }
10051 
10052     pthread_mutex_unlock(&mMutex);
10053     return;
10054 }
10055 /*===========================================================================
10056  * FUNCTION   : initialize
10057  *
10058  * DESCRIPTION: Pass framework callback pointers to HAL
10059  *
10060  * PARAMETERS :
10061  *
10062  *
10063  * RETURN     : Success : 0
10064  *              Failure: -ENODEV
10065  *==========================================================================*/
10066 
initialize(const struct camera3_device * device,const camera3_callback_ops_t * callback_ops)10067 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
10068                                   const camera3_callback_ops_t *callback_ops)
10069 {
10070     LOGD("E");
10071     QCamera3HardwareInterface *hw =
10072         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10073     if (!hw) {
10074         LOGE("NULL camera device");
10075         return -ENODEV;
10076     }
10077 
10078     int rc = hw->initialize(callback_ops);
10079     LOGD("X");
10080     return rc;
10081 }
10082 
10083 /*===========================================================================
10084  * FUNCTION   : configure_streams
10085  *
10086  * DESCRIPTION:
10087  *
10088  * PARAMETERS :
10089  *
10090  *
10091  * RETURN     : Success: 0
10092  *              Failure: -EINVAL (if stream configuration is invalid)
10093  *                       -ENODEV (fatal error)
10094  *==========================================================================*/
10095 
configure_streams(const struct camera3_device * device,camera3_stream_configuration_t * stream_list)10096 int QCamera3HardwareInterface::configure_streams(
10097         const struct camera3_device *device,
10098         camera3_stream_configuration_t *stream_list)
10099 {
10100     LOGD("E");
10101     QCamera3HardwareInterface *hw =
10102         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10103     if (!hw) {
10104         LOGE("NULL camera device");
10105         return -ENODEV;
10106     }
10107     int rc = hw->configureStreams(stream_list);
10108     LOGD("X");
10109     return rc;
10110 }
10111 
10112 /*===========================================================================
10113  * FUNCTION   : construct_default_request_settings
10114  *
10115  * DESCRIPTION: Configure a settings buffer to meet the required use case
10116  *
10117  * PARAMETERS :
10118  *
10119  *
10120  * RETURN     : Success: Return valid metadata
10121  *              Failure: Return NULL
10122  *==========================================================================*/
10123 const camera_metadata_t* QCamera3HardwareInterface::
construct_default_request_settings(const struct camera3_device * device,int type)10124     construct_default_request_settings(const struct camera3_device *device,
10125                                         int type)
10126 {
10127 
10128     LOGD("E");
10129     camera_metadata_t* fwk_metadata = NULL;
10130     QCamera3HardwareInterface *hw =
10131         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10132     if (!hw) {
10133         LOGE("NULL camera device");
10134         return NULL;
10135     }
10136 
10137     fwk_metadata = hw->translateCapabilityToMetadata(type);
10138 
10139     LOGD("X");
10140     return fwk_metadata;
10141 }
10142 
10143 /*===========================================================================
10144  * FUNCTION   : process_capture_request
10145  *
10146  * DESCRIPTION:
10147  *
10148  * PARAMETERS :
10149  *
10150  *
10151  * RETURN     :
10152  *==========================================================================*/
process_capture_request(const struct camera3_device * device,camera3_capture_request_t * request)10153 int QCamera3HardwareInterface::process_capture_request(
10154                     const struct camera3_device *device,
10155                     camera3_capture_request_t *request)
10156 {
10157     LOGD("E");
10158     QCamera3HardwareInterface *hw =
10159         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10160     if (!hw) {
10161         LOGE("NULL camera device");
10162         return -EINVAL;
10163     }
10164 
10165     int rc = hw->processCaptureRequest(request);
10166     LOGD("X");
10167     return rc;
10168 }
10169 
10170 /*===========================================================================
10171  * FUNCTION   : dump
10172  *
10173  * DESCRIPTION:
10174  *
10175  * PARAMETERS :
10176  *
10177  *
10178  * RETURN     :
10179  *==========================================================================*/
10180 
dump(const struct camera3_device * device,int fd)10181 void QCamera3HardwareInterface::dump(
10182                 const struct camera3_device *device, int fd)
10183 {
10184     /* Log level property is read when "adb shell dumpsys media.camera" is
10185        called so that the log level can be controlled without restarting
10186        the media server */
10187     getLogLevel();
10188 
10189     LOGD("E");
10190     QCamera3HardwareInterface *hw =
10191         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10192     if (!hw) {
10193         LOGE("NULL camera device");
10194         return;
10195     }
10196 
10197     hw->dump(fd);
10198     LOGD("X");
10199     return;
10200 }
10201 
10202 /*===========================================================================
10203  * FUNCTION   : flush
10204  *
10205  * DESCRIPTION:
10206  *
10207  * PARAMETERS :
10208  *
10209  *
10210  * RETURN     :
10211  *==========================================================================*/
10212 
int QCamera3HardwareInterface::flush(
                const struct camera3_device *device)
{
    int rc;
    LOGD("E");
    // Recover the HAL instance stored in the framework device handle.
    QCamera3HardwareInterface *hw =
        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
    if (!hw) {
        LOGE("NULL camera device");
        return -EINVAL;
    }

    // State is validated under mMutex; the lock is dropped before the
    // heavyweight operations below to avoid holding it across channel calls.
    pthread_mutex_lock(&hw->mMutex);
    // Validate current state
    switch (hw->mState) {
        case STARTED:
            /* valid state */
            break;

        case ERROR:
            // Device already errored: release the lock first, then report
            // the fatal error to the framework.
            pthread_mutex_unlock(&hw->mMutex);
            hw->handleCameraDeviceError();
            return -ENODEV;

        default:
            // Not streaming (e.g. just configured or closing): flush is a
            // successful no-op per the camera3 contract.
            LOGI("Flush returned during state %d", hw->mState);
            pthread_mutex_unlock(&hw->mMutex);
            return 0;
    }
    pthread_mutex_unlock(&hw->mMutex);

    // Flush with channel restart so streaming can resume afterwards.
    rc = hw->flush(true /* restart channels */ );
    LOGD("X");
    return rc;
}
10248 
10249 /*===========================================================================
10250  * FUNCTION   : close_camera_device
10251  *
10252  * DESCRIPTION:
10253  *
10254  * PARAMETERS :
10255  *
10256  *
10257  * RETURN     :
10258  *==========================================================================*/
close_camera_device(struct hw_device_t * device)10259 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
10260 {
10261     int ret = NO_ERROR;
10262     QCamera3HardwareInterface *hw =
10263         reinterpret_cast<QCamera3HardwareInterface *>(
10264             reinterpret_cast<camera3_device_t *>(device)->priv);
10265     if (!hw) {
10266         LOGE("NULL camera device");
10267         return BAD_VALUE;
10268     }
10269 
10270     LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
10271     delete hw;
10272     LOGI("[KPI Perf]: X");
10273     return ret;
10274 }
10275 
10276 /*===========================================================================
10277  * FUNCTION   : getWaveletDenoiseProcessPlate
10278  *
10279  * DESCRIPTION: query wavelet denoise process plate
10280  *
10281  * PARAMETERS : None
10282  *
 * RETURN     : WNR process plate value
10284  *==========================================================================*/
getWaveletDenoiseProcessPlate()10285 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
10286 {
10287     char prop[PROPERTY_VALUE_MAX];
10288     memset(prop, 0, sizeof(prop));
10289     property_get("persist.denoise.process.plates", prop, "0");
10290     int processPlate = atoi(prop);
10291     switch(processPlate) {
10292     case 0:
10293         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10294     case 1:
10295         return CAM_WAVELET_DENOISE_CBCR_ONLY;
10296     case 2:
10297         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10298     case 3:
10299         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10300     default:
10301         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10302     }
10303 }
10304 
10305 
10306 /*===========================================================================
10307  * FUNCTION   : getTemporalDenoiseProcessPlate
10308  *
10309  * DESCRIPTION: query temporal denoise process plate
10310  *
10311  * PARAMETERS : None
10312  *
 * RETURN     : TNR process plate value
10314  *==========================================================================*/
getTemporalDenoiseProcessPlate()10315 cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
10316 {
10317     char prop[PROPERTY_VALUE_MAX];
10318     memset(prop, 0, sizeof(prop));
10319     property_get("persist.tnr.process.plates", prop, "0");
10320     int processPlate = atoi(prop);
10321     switch(processPlate) {
10322     case 0:
10323         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10324     case 1:
10325         return CAM_WAVELET_DENOISE_CBCR_ONLY;
10326     case 2:
10327         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10328     case 3:
10329         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10330     default:
10331         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10332     }
10333 }
10334 
10335 
10336 /*===========================================================================
10337  * FUNCTION   : extractSceneMode
10338  *
10339  * DESCRIPTION: Extract scene mode from frameworks set metadata
10340  *
10341  * PARAMETERS :
10342  *      @frame_settings: CameraMetadata reference
10343  *      @metaMode: ANDROID_CONTORL_MODE
10344  *      @hal_metadata: hal metadata structure
10345  *
10346  * RETURN     : None
10347  *==========================================================================*/
extractSceneMode(const CameraMetadata & frame_settings,uint8_t metaMode,metadata_buffer_t * hal_metadata)10348 int32_t QCamera3HardwareInterface::extractSceneMode(
10349         const CameraMetadata &frame_settings, uint8_t metaMode,
10350         metadata_buffer_t *hal_metadata)
10351 {
10352     int32_t rc = NO_ERROR;
10353 
10354     if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
10355         camera_metadata_ro_entry entry =
10356                 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
10357         if (0 == entry.count)
10358             return rc;
10359 
10360         uint8_t fwk_sceneMode = entry.data.u8[0];
10361 
10362         int val = lookupHalName(SCENE_MODES_MAP,
10363                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
10364                 fwk_sceneMode);
10365         if (NAME_NOT_FOUND != val) {
10366             uint8_t sceneMode = (uint8_t)val;
10367             LOGD("sceneMode: %d", sceneMode);
10368             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10369                     CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10370                 rc = BAD_VALUE;
10371             }
10372         }
10373     } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
10374             (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
10375         uint8_t sceneMode = CAM_SCENE_MODE_OFF;
10376         LOGD("sceneMode: %d", sceneMode);
10377         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10378                 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10379             rc = BAD_VALUE;
10380         }
10381     }
10382     return rc;
10383 }
10384 
10385 /*===========================================================================
10386  * FUNCTION   : needRotationReprocess
10387  *
10388  * DESCRIPTION: if rotation needs to be done by reprocess in pp
10389  *
10390  * PARAMETERS : none
10391  *
10392  * RETURN     : true: needed
10393  *              false: no need
10394  *==========================================================================*/
needRotationReprocess()10395 bool QCamera3HardwareInterface::needRotationReprocess()
10396 {
10397     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
10398         // current rotation is not zero, and pp has the capability to process rotation
10399         LOGH("need do reprocess for rotation");
10400         return true;
10401     }
10402 
10403     return false;
10404 }
10405 
10406 /*===========================================================================
10407  * FUNCTION   : needReprocess
10408  *
 * DESCRIPTION: if reprocess is needed
10410  *
10411  * PARAMETERS : none
10412  *
10413  * RETURN     : true: needed
10414  *              false: no need
10415  *==========================================================================*/
needReprocess(cam_feature_mask_t postprocess_mask)10416 bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
10417 {
10418     if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
10419         // TODO: add for ZSL HDR later
10420         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
10421         if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
10422             LOGH("need do reprocess for ZSL WNR or min PP reprocess");
10423             return true;
10424         } else {
10425             LOGH("already post processed frame");
10426             return false;
10427         }
10428     }
10429     return needRotationReprocess();
10430 }
10431 
10432 /*===========================================================================
10433  * FUNCTION   : needJpegExifRotation
10434  *
10435  * DESCRIPTION: if rotation from jpeg is needed
10436  *
10437  * PARAMETERS : none
10438  *
10439  * RETURN     : true: needed
10440  *              false: no need
10441  *==========================================================================*/
needJpegExifRotation()10442 bool QCamera3HardwareInterface::needJpegExifRotation()
10443 {
10444    /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
10445     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
10446        LOGD("Need use Jpeg EXIF Rotation");
10447        return true;
10448     }
10449     return false;
10450 }
10451 
10452 /*===========================================================================
10453  * FUNCTION   : addOfflineReprocChannel
10454  *
10455  * DESCRIPTION: add a reprocess channel that will do reprocess on frames
10456  *              coming from input channel
10457  *
10458  * PARAMETERS :
10459  *   @config  : reprocess configuration
10460  *   @inputChHandle : pointer to the input (source) channel
10461  *
10462  *
10463  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
10464  *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    // Bind the new channel to the session's camera/channel handles; it
    // reports results and buffer errors through the same callbacks as the
    // regular channels.
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
            config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    if (NULL == pChannel) {
        LOGE("no mem for reprocess channel");
        return NULL;
    }

    // Channel must be initialized before streams can be added; on any
    // failure from here on the channel is deleted and NULL returned.
    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        LOGE("init reprocess channel failed, ret = %d", rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    // Start from the HAL3 superset of post-processing features, then trim
    // it to what the hardware actually supports.
    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
            & CAM_QCOM_FEATURE_DSDN) {
        //Use CPP CDS incase h/w supports it.
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
    }
    // Drop rotation from the mask when the CPP cannot rotate.
    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
    }

    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
10512 
10513 /*===========================================================================
10514  * FUNCTION   : getMobicatMask
10515  *
10516  * DESCRIPTION: returns mobicat mask
10517  *
10518  * PARAMETERS : none
10519  *
10520  * RETURN     : mobicat mask
10521  *
10522  *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Accessor for the Mobicat enable mask set by setMobicat() from the
    // persist.camera.mobicat property.
    return m_MobicatMask;
}
10527 
10528 /*===========================================================================
10529  * FUNCTION   : setMobicat
10530  *
10531  * DESCRIPTION: set Mobicat on/off.
10532  *
10533  * PARAMETERS :
10534  *   @params  : none
10535  *
10536  * RETURN     : int32_t type of status
10537  *              NO_ERROR  -- success
10538  *              none-zero failure code
10539  *==========================================================================*/
setMobicat()10540 int32_t QCamera3HardwareInterface::setMobicat()
10541 {
10542     char value [PROPERTY_VALUE_MAX];
10543     property_get("persist.camera.mobicat", value, "0");
10544     int32_t ret = NO_ERROR;
10545     uint8_t enableMobi = (uint8_t)atoi(value);
10546 
10547     if (enableMobi) {
10548         tune_cmd_t tune_cmd;
10549         tune_cmd.type = SET_RELOAD_CHROMATIX;
10550         tune_cmd.module = MODULE_ALL;
10551         tune_cmd.value = TRUE;
10552         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10553                 CAM_INTF_PARM_SET_VFE_COMMAND,
10554                 tune_cmd);
10555 
10556         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10557                 CAM_INTF_PARM_SET_PP_COMMAND,
10558                 tune_cmd);
10559     }
10560     m_MobicatMask = enableMobi;
10561 
10562     return ret;
10563 }
10564 
10565 /*===========================================================================
10566 * FUNCTION   : getLogLevel
10567 *
10568 * DESCRIPTION: Reads the log level property into a variable
10569 *
10570 * PARAMETERS :
10571 *   None
10572 *
10573 * RETURN     :
10574 *   None
10575 *==========================================================================*/
getLogLevel()10576 void QCamera3HardwareInterface::getLogLevel()
10577 {
10578     char prop[PROPERTY_VALUE_MAX];
10579     uint32_t globalLogLevel = 0;
10580 
10581     property_get("persist.camera.hal.debug", prop, "0");
10582     int val = atoi(prop);
10583     if (0 <= val) {
10584         gCamHal3LogLevel = (uint32_t)val;
10585     }
10586 
10587     property_get("persist.camera.kpi.debug", prop, "1");
10588     gKpiDebugLevel = atoi(prop);
10589 
10590     property_get("persist.camera.global.debug", prop, "0");
10591     val = atoi(prop);
10592     if (0 <= val) {
10593         globalLogLevel = (uint32_t)val;
10594     }
10595 
10596     /* Highest log level among hal.logs and global.logs is selected */
10597     if (gCamHal3LogLevel < globalLogLevel)
10598         gCamHal3LogLevel = globalLogLevel;
10599 
10600     return;
10601 }
10602 
10603 /*===========================================================================
10604  * FUNCTION   : validateStreamRotations
10605  *
10606  * DESCRIPTION: Check if the rotations requested are supported
10607  *
10608  * PARAMETERS :
10609  *   @stream_list : streams to be configured
10610  *
10611  * RETURN     : NO_ERROR on success
10612  *              -EINVAL on failure
10613  *
10614  *==========================================================================*/
validateStreamRotations(camera3_stream_configuration_t * streamList)10615 int QCamera3HardwareInterface::validateStreamRotations(
10616         camera3_stream_configuration_t *streamList)
10617 {
10618     int rc = NO_ERROR;
10619 
10620     /*
10621     * Loop through all streams requested in configuration
10622     * Check if unsupported rotations have been requested on any of them
10623     */
10624     for (size_t j = 0; j < streamList->num_streams; j++){
10625         camera3_stream_t *newStream = streamList->streams[j];
10626 
10627         bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
10628         bool isImplDef = (newStream->format ==
10629                 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
10630         bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
10631                 isImplDef);
10632 
10633         if (isRotated && (!isImplDef || isZsl)) {
10634             LOGE("Error: Unsupported rotation of %d requested for stream"
10635                     "type:%d and stream format:%d",
10636                     newStream->rotation, newStream->stream_type,
10637                     newStream->format);
10638             rc = -EINVAL;
10639             break;
10640         }
10641     }
10642 
10643     return rc;
10644 }
10645 
10646 /*===========================================================================
10647 * FUNCTION   : getFlashInfo
10648 *
10649 * DESCRIPTION: Retrieve information about whether the device has a flash.
10650 *
10651 * PARAMETERS :
10652 *   @cameraId  : Camera id to query
10653 *   @hasFlash  : Boolean indicating whether there is a flash device
10654 *                associated with given camera
10655 *   @flashNode : If a flash device exists, this will be its device node.
10656 *
10657 * RETURN     :
10658 *   None
10659 *==========================================================================*/
getFlashInfo(const int cameraId,bool & hasFlash,char (& flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])10660 void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
10661         bool& hasFlash,
10662         char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
10663 {
10664     cam_capability_t* camCapability = gCamCapability[cameraId];
10665     if (NULL == camCapability) {
10666         hasFlash = false;
10667         flashNode[0] = '\0';
10668     } else {
10669         hasFlash = camCapability->flash_available;
10670         strlcpy(flashNode,
10671                 (char*)camCapability->flash_dev_name,
10672                 QCAMERA_MAX_FILEPATH_LENGTH);
10673     }
10674 }
10675 
10676 /*===========================================================================
10677 * FUNCTION   : getEepromVersionInfo
10678 *
10679 * DESCRIPTION: Retrieve version info of the sensor EEPROM data
10680 *
10681 * PARAMETERS : None
10682 *
10683 * RETURN     : string describing EEPROM version
10684 *              "\0" if no such info available
10685 *==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Points into the static capability table; an empty string ("\0")
    // indicates the sensor exposes no EEPROM version info.
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
10690 
10691 /*===========================================================================
10692 * FUNCTION   : getLdafCalib
10693 *
10694 * DESCRIPTION: Retrieve Laser AF calibration data
10695 *
10696 * PARAMETERS : None
10697 *
10698 * RETURN     : Two uint32_t describing laser AF calibration data
10699 *              NULL if none is available.
10700 *==========================================================================*/
getLdafCalib()10701 const uint32_t *QCamera3HardwareInterface::getLdafCalib()
10702 {
10703     if (mLdafCalibExist) {
10704         return &mLdafCalib[0];
10705     } else {
10706         return NULL;
10707     }
10708 }
10709 
10710 /*===========================================================================
10711  * FUNCTION   : dynamicUpdateMetaStreamInfo
10712  *
10713  * DESCRIPTION: This function:
10714  *             (1) stops all the channels
10715  *             (2) returns error on pending requests and buffers
10716  *             (3) sends metastream_info in setparams
10717  *             (4) starts all channels
10718  *             This is useful when sensor has to be restarted to apply any
10719  *             settings such as frame rate from a different sensor mode
10720  *
10721  * PARAMETERS : None
10722  *
10723  * RETURN     : NO_ERROR on success
10724  *              Error codes on failure
10725  *
10726  *==========================================================================*/
dynamicUpdateMetaStreamInfo()10727 int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
10728 {
10729     ATRACE_CALL();
10730     int rc = NO_ERROR;
10731 
10732     LOGD("E");
10733 
10734     rc = stopAllChannels();
10735     if (rc < 0) {
10736         LOGE("stopAllChannels failed");
10737         return rc;
10738     }
10739 
10740     rc = notifyErrorForPendingRequests();
10741     if (rc < 0) {
10742         LOGE("notifyErrorForPendingRequests failed");
10743         return rc;
10744     }
10745 
10746     for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
10747         LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
10748                 "Format:%d",
10749                 mStreamConfigInfo.type[i],
10750                 mStreamConfigInfo.stream_sizes[i].width,
10751                 mStreamConfigInfo.stream_sizes[i].height,
10752                 mStreamConfigInfo.postprocess_mask[i],
10753                 mStreamConfigInfo.format[i]);
10754     }
10755 
10756     /* Send meta stream info once again so that ISP can start */
10757     ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10758             CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
10759     rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
10760             mParameters);
10761     if (rc < 0) {
10762         LOGE("set Metastreaminfo failed. Sensor mode does not change");
10763     }
10764 
10765     rc = startAllChannels();
10766     if (rc < 0) {
10767         LOGE("startAllChannels failed");
10768         return rc;
10769     }
10770 
10771     LOGD("X");
10772     return rc;
10773 }
10774 
10775 /*===========================================================================
10776  * FUNCTION   : stopAllChannels
10777  *
10778  * DESCRIPTION: This function stops (equivalent to stream-off) all channels
10779  *
10780  * PARAMETERS : None
10781  *
10782  * RETURN     : NO_ERROR on success
10783  *              Error codes on failure
10784  *
10785  *==========================================================================*/
stopAllChannels()10786 int32_t QCamera3HardwareInterface::stopAllChannels()
10787 {
10788     int32_t rc = NO_ERROR;
10789 
10790     LOGD("Stopping all channels");
10791     // Stop the Streams/Channels
10792     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10793         it != mStreamInfo.end(); it++) {
10794         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10795         if (channel) {
10796             channel->stop();
10797         }
10798         (*it)->status = INVALID;
10799     }
10800 
10801     if (mSupportChannel) {
10802         mSupportChannel->stop();
10803     }
10804     if (mAnalysisChannel) {
10805         mAnalysisChannel->stop();
10806     }
10807     if (mRawDumpChannel) {
10808         mRawDumpChannel->stop();
10809     }
10810     if (mMetadataChannel) {
10811         /* If content of mStreamInfo is not 0, there is metadata stream */
10812         mMetadataChannel->stop();
10813     }
10814 
10815     LOGD("All channels stopped");
10816     return rc;
10817 }
10818 
10819 /*===========================================================================
10820  * FUNCTION   : startAllChannels
10821  *
10822  * DESCRIPTION: This function starts (equivalent to stream-on) all channels
10823  *
10824  * PARAMETERS : None
10825  *
10826  * RETURN     : NO_ERROR on success
10827  *              Error codes on failure
10828  *
10829  *==========================================================================*/
startAllChannels()10830 int32_t QCamera3HardwareInterface::startAllChannels()
10831 {
10832     int32_t rc = NO_ERROR;
10833 
10834     LOGD("Start all channels ");
10835     // Start the Streams/Channels
10836     if (mMetadataChannel) {
10837         /* If content of mStreamInfo is not 0, there is metadata stream */
10838         rc = mMetadataChannel->start();
10839         if (rc < 0) {
10840             LOGE("META channel start failed");
10841             return rc;
10842         }
10843     }
10844     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10845         it != mStreamInfo.end(); it++) {
10846         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10847         if (channel) {
10848             rc = channel->start();
10849             if (rc < 0) {
10850                 LOGE("channel start failed");
10851                 return rc;
10852             }
10853         }
10854     }
10855     if (mAnalysisChannel) {
10856         mAnalysisChannel->start();
10857     }
10858     if (mSupportChannel) {
10859         rc = mSupportChannel->start();
10860         if (rc < 0) {
10861             LOGE("Support channel start failed");
10862             return rc;
10863         }
10864     }
10865     if (mRawDumpChannel) {
10866         rc = mRawDumpChannel->start();
10867         if (rc < 0) {
10868             LOGE("RAW dump channel start failed");
10869             return rc;
10870         }
10871     }
10872 
10873     LOGD("All channels started");
10874     return rc;
10875 }
10876 
10877 /*===========================================================================
10878  * FUNCTION   : notifyErrorForPendingRequests
10879  *
10880  * DESCRIPTION: This function sends error for all the pending requests/buffers
10881  *
10882  * PARAMETERS : None
10883  *
10884  * RETURN     : Error codes
10885  *              NO_ERROR on success
10886  *
10887  *==========================================================================*/
notifyErrorForPendingRequests()10888 int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
10889 {
10890     int32_t rc = NO_ERROR;
10891     unsigned int frameNum = 0;
10892     camera3_capture_result_t result;
10893     camera3_stream_buffer_t *pStream_Buf = NULL;
10894 
10895     memset(&result, 0, sizeof(camera3_capture_result_t));
10896 
10897     if (mPendingRequestsList.size() > 0) {
10898         pendingRequestIterator i = mPendingRequestsList.begin();
10899         frameNum = i->frame_number;
10900     } else {
10901         /* There might still be pending buffers even though there are
10902          no pending requests. Setting the frameNum to MAX so that
10903          all the buffers with smaller frame numbers are returned */
10904         frameNum = UINT_MAX;
10905     }
10906 
10907     LOGH("Oldest frame num on mPendingRequestsList = %u",
10908        frameNum);
10909 
10910     for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
10911             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
10912 
10913         if (req->frame_number < frameNum) {
10914             // Send Error notify to frameworks for each buffer for which
10915             // metadata buffer is already sent
10916             LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
10917                 req->frame_number, req->mPendingBufferList.size());
10918 
10919             pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
10920             if (NULL == pStream_Buf) {
10921                 LOGE("No memory for pending buffers array");
10922                 return NO_MEMORY;
10923             }
10924             memset(pStream_Buf, 0,
10925                 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
10926             result.result = NULL;
10927             result.frame_number = req->frame_number;
10928             result.num_output_buffers = req->mPendingBufferList.size();
10929             result.output_buffers = pStream_Buf;
10930 
10931             size_t index = 0;
10932             for (auto info = req->mPendingBufferList.begin();
10933                 info != req->mPendingBufferList.end(); ) {
10934 
10935                 camera3_notify_msg_t notify_msg;
10936                 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
10937                 notify_msg.type = CAMERA3_MSG_ERROR;
10938                 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
10939                 notify_msg.message.error.error_stream = info->stream;
10940                 notify_msg.message.error.frame_number = req->frame_number;
10941                 pStream_Buf[index].acquire_fence = -1;
10942                 pStream_Buf[index].release_fence = -1;
10943                 pStream_Buf[index].buffer = info->buffer;
10944                 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
10945                 pStream_Buf[index].stream = info->stream;
10946                 mCallbackOps->notify(mCallbackOps, &notify_msg);
10947                 index++;
10948                 // Remove buffer from list
10949                 info = req->mPendingBufferList.erase(info);
10950             }
10951 
10952             // Remove this request from Map
10953             LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
10954                 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
10955             req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
10956 
10957             mCallbackOps->process_capture_result(mCallbackOps, &result);
10958 
10959             delete [] pStream_Buf;
10960         } else {
10961 
10962             // Go through the pending requests info and send error request to framework
10963             LOGE("Sending ERROR REQUEST for all pending requests");
10964             pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
10965 
10966             LOGE("Sending ERROR REQUEST for frame %d", req->frame_number);
10967 
10968             // Send error notify to frameworks
10969             camera3_notify_msg_t notify_msg;
10970             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
10971             notify_msg.type = CAMERA3_MSG_ERROR;
10972             notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
10973             notify_msg.message.error.error_stream = NULL;
10974             notify_msg.message.error.frame_number = req->frame_number;
10975             mCallbackOps->notify(mCallbackOps, &notify_msg);
10976 
10977             pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
10978             if (NULL == pStream_Buf) {
10979                 LOGE("No memory for pending buffers array");
10980                 return NO_MEMORY;
10981             }
10982             memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
10983 
10984             result.result = NULL;
10985             result.frame_number = req->frame_number;
10986             result.input_buffer = i->input_buffer;
10987             result.num_output_buffers = req->mPendingBufferList.size();
10988             result.output_buffers = pStream_Buf;
10989 
10990             size_t index = 0;
10991             for (auto info = req->mPendingBufferList.begin();
10992                 info != req->mPendingBufferList.end(); ) {
10993                 pStream_Buf[index].acquire_fence = -1;
10994                 pStream_Buf[index].release_fence = -1;
10995                 pStream_Buf[index].buffer = info->buffer;
10996                 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
10997                 pStream_Buf[index].stream = info->stream;
10998                 index++;
10999                 // Remove buffer from list
11000                 info = req->mPendingBufferList.erase(info);
11001             }
11002 
11003             // Remove this request from Map
11004             LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
11005                 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
11006             req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
11007 
11008             mCallbackOps->process_capture_result(mCallbackOps, &result);
11009             delete [] pStream_Buf;
11010             i = erasePendingRequest(i);
11011         }
11012     }
11013 
11014     /* Reset pending frame Drop list and requests list */
11015     mPendingFrameDropList.clear();
11016 
11017     for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
11018         req.mPendingBufferList.clear();
11019     }
11020     mPendingBuffersMap.mPendingBuffersInRequest.clear();
11021     mPendingReprocessResultList.clear();
11022     LOGH("Cleared all the pending buffers ");
11023 
11024     return rc;
11025 }
11026 
isOnEncoder(const cam_dimension_t max_viewfinder_size,uint32_t width,uint32_t height)11027 bool QCamera3HardwareInterface::isOnEncoder(
11028         const cam_dimension_t max_viewfinder_size,
11029         uint32_t width, uint32_t height)
11030 {
11031     return (width > (uint32_t)max_viewfinder_size.width ||
11032             height > (uint32_t)max_viewfinder_size.height);
11033 }
11034 
11035 /*===========================================================================
11036  * FUNCTION   : setBundleInfo
11037  *
11038  * DESCRIPTION: Set bundle info for all streams that are bundle.
11039  *
11040  * PARAMETERS : None
11041  *
11042  * RETURN     : NO_ERROR on success
11043  *              Error codes on failure
11044  *==========================================================================*/
setBundleInfo()11045 int32_t QCamera3HardwareInterface::setBundleInfo()
11046 {
11047     int32_t rc = NO_ERROR;
11048 
11049     if (mChannelHandle) {
11050         cam_bundle_config_t bundleInfo;
11051         memset(&bundleInfo, 0, sizeof(bundleInfo));
11052         rc = mCameraHandle->ops->get_bundle_info(
11053                 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
11054         if (rc != NO_ERROR) {
11055             LOGE("get_bundle_info failed");
11056             return rc;
11057         }
11058         if (mAnalysisChannel) {
11059             mAnalysisChannel->setBundleInfo(bundleInfo);
11060         }
11061         if (mSupportChannel) {
11062             mSupportChannel->setBundleInfo(bundleInfo);
11063         }
11064         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
11065                 it != mStreamInfo.end(); it++) {
11066             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
11067             channel->setBundleInfo(bundleInfo);
11068         }
11069         if (mRawDumpChannel) {
11070             mRawDumpChannel->setBundleInfo(bundleInfo);
11071         }
11072     }
11073 
11074     return rc;
11075 }
11076 
11077 /*===========================================================================
11078  * FUNCTION   : get_num_overall_buffers
11079  *
11080  * DESCRIPTION: Estimate number of pending buffers across all requests.
11081  *
11082  * PARAMETERS : None
11083  *
11084  * RETURN     : Number of overall pending buffers
11085  *
11086  *==========================================================================*/
get_num_overall_buffers()11087 uint32_t PendingBuffersMap::get_num_overall_buffers()
11088 {
11089     uint32_t sum_buffers = 0;
11090     for (auto &req : mPendingBuffersInRequest) {
11091         sum_buffers += req.mPendingBufferList.size();
11092     }
11093     return sum_buffers;
11094 }
11095 
11096 /*===========================================================================
11097  * FUNCTION   : removeBuf
11098  *
11099  * DESCRIPTION: Remove a matching buffer from tracker.
11100  *
11101  * PARAMETERS : @buffer: image buffer for the callback
11102  *
11103  * RETURN     : None
11104  *
11105  *==========================================================================*/
removeBuf(buffer_handle_t * buffer)11106 void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
11107 {
11108     bool buffer_found = false;
11109     for (auto req = mPendingBuffersInRequest.begin();
11110             req != mPendingBuffersInRequest.end(); req++) {
11111         for (auto k = req->mPendingBufferList.begin();
11112                 k != req->mPendingBufferList.end(); k++ ) {
11113             if (k->buffer == buffer) {
11114                 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
11115                         req->frame_number, buffer);
11116                 k = req->mPendingBufferList.erase(k);
11117                 if (req->mPendingBufferList.empty()) {
11118                     // Remove this request from Map
11119                     req = mPendingBuffersInRequest.erase(req);
11120                 }
11121                 buffer_found = true;
11122                 break;
11123             }
11124         }
11125         if (buffer_found) {
11126             break;
11127         }
11128     }
11129     LOGD("mPendingBuffersMap.num_overall_buffers = %d",
11130             get_num_overall_buffers());
11131 }
11132 
11133 /*===========================================================================
11134  * FUNCTION   : getBufErrStatus
11135  *
11136  * DESCRIPTION: get buffer error status
11137  *
11138  * PARAMETERS : @buffer: buffer handle
11139  *
11140  * RETURN     : None
11141  *
11142  *==========================================================================*/
getBufErrStatus(buffer_handle_t * buffer)11143 int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
11144 {
11145     for (auto& req : mPendingBuffersInRequest) {
11146         for (auto& k : req.mPendingBufferList) {
11147             if (k.buffer == buffer)
11148                 return k.bufStatus;
11149         }
11150     }
11151     return CAMERA3_BUFFER_STATUS_OK;
11152 }
11153 
11154 /*===========================================================================
11155  * FUNCTION   : setPAAFSupport
11156  *
11157  * DESCRIPTION: Set the preview-assisted auto focus support bit in
11158  *              feature mask according to stream type and filter
11159  *              arrangement
11160  *
11161  * PARAMETERS : @feature_mask: current feature mask, which may be modified
11162  *              @stream_type: stream type
11163  *              @filter_arrangement: filter arrangement
11164  *
11165  * RETURN     : None
11166  *==========================================================================*/
setPAAFSupport(cam_feature_mask_t & feature_mask,cam_stream_type_t stream_type,cam_color_filter_arrangement_t filter_arrangement)11167 void QCamera3HardwareInterface::setPAAFSupport(
11168         cam_feature_mask_t& feature_mask,
11169         cam_stream_type_t stream_type,
11170         cam_color_filter_arrangement_t filter_arrangement)
11171 {
11172     switch (filter_arrangement) {
11173     case CAM_FILTER_ARRANGEMENT_RGGB:
11174     case CAM_FILTER_ARRANGEMENT_GRBG:
11175     case CAM_FILTER_ARRANGEMENT_GBRG:
11176     case CAM_FILTER_ARRANGEMENT_BGGR:
11177         if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
11178                 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
11179             feature_mask |= CAM_QCOM_FEATURE_PAAF;
11180         }
11181         break;
11182     case CAM_FILTER_ARRANGEMENT_Y:
11183         if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
11184             feature_mask |= CAM_QCOM_FEATURE_PAAF;
11185         }
11186         break;
11187     default:
11188         break;
11189     }
11190     LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
11191             feature_mask, stream_type, filter_arrangement);
11192 
11193 
11194 }
11195 
11196 /*===========================================================================
11197  * FUNCTION   : adjustBlackLevelForCFA
11198  *
11199  * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
11200  *              of bayer CFA (Color Filter Array).
11201  *
11202  * PARAMETERS : @input: black level pattern in the order of RGGB
11203  *              @output: black level pattern in the order of CFA
11204  *              @color_arrangement: CFA color arrangement
11205  *
11206  * RETURN     : None
11207  *==========================================================================*/
11208 template<typename T>
adjustBlackLevelForCFA(T input[BLACK_LEVEL_PATTERN_CNT],T output[BLACK_LEVEL_PATTERN_CNT],cam_color_filter_arrangement_t color_arrangement)11209 void QCamera3HardwareInterface::adjustBlackLevelForCFA(
11210         T input[BLACK_LEVEL_PATTERN_CNT],
11211         T output[BLACK_LEVEL_PATTERN_CNT],
11212         cam_color_filter_arrangement_t color_arrangement)
11213 {
11214     switch (color_arrangement) {
11215     case CAM_FILTER_ARRANGEMENT_GRBG:
11216         output[0] = input[1];
11217         output[1] = input[0];
11218         output[2] = input[3];
11219         output[3] = input[2];
11220         break;
11221     case CAM_FILTER_ARRANGEMENT_GBRG:
11222         output[0] = input[2];
11223         output[1] = input[3];
11224         output[2] = input[0];
11225         output[3] = input[1];
11226         break;
11227     case CAM_FILTER_ARRANGEMENT_BGGR:
11228         output[0] = input[3];
11229         output[1] = input[2];
11230         output[2] = input[1];
11231         output[3] = input[0];
11232         break;
11233     case CAM_FILTER_ARRANGEMENT_RGGB:
11234         output[0] = input[0];
11235         output[1] = input[1];
11236         output[2] = input[2];
11237         output[3] = input[3];
11238         break;
11239     default:
11240         LOGE("Invalid color arrangement to derive dynamic blacklevel");
11241         break;
11242     }
11243 }
11244 
11245 /*===========================================================================
11246  * FUNCTION   : is60HzZone
11247  *
11248  * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
11249  *
11250  * PARAMETERS : None
11251  *
11252  * RETURN     : True if in 60Hz zone, False otherwise
11253  *==========================================================================*/
is60HzZone()11254 bool QCamera3HardwareInterface::is60HzZone()
11255 {
11256     time_t t = time(NULL);
11257     struct tm lt;
11258 
11259     struct tm* r = localtime_r(&t, &lt);
11260 
11261     if (r == NULL || lt.tm_gmtoff <=  -2*60*60 || lt.tm_gmtoff >= 8*60*60)
11262         return true;
11263     else
11264         return false;
11265 }
11266 }; //end namespace qcamera
11267